/* Expand builtin functions.
   Copyright (C) 1988-2021 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it under
the terms of the GNU General Public License as published by the Free
Software Foundation; either version 3, or (at your option) any later
version.

GCC is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Legacy warning!  Please add no further builtin simplifications here
   (apart from pure constant folding) - builtin simplifications should go
   to match.pd or gimple-fold.c instead.  */

#include "config.h"
#include "system.h"
#include "coretypes.h"
#include "backend.h"
#include "target.h"
#include "rtl.h"
#include "tree.h"
#include "memmodel.h"
#include "gimple.h"
#include "predict.h"
#include "tm_p.h"
#include "stringpool.h"
#include "tree-vrp.h"
#include "tree-ssanames.h"
#include "expmed.h"
#include "optabs.h"
#include "emit-rtl.h"
#include "recog.h"
#include "diagnostic-core.h"
#include "alias.h"
#include "fold-const.h"
#include "fold-const-call.h"
#include "gimple-ssa-warn-restrict.h"
#include "stor-layout.h"
#include "calls.h"
#include "varasm.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "realmpfr.h"
#include "cfgrtl.h"
#include "except.h"
#include "dojump.h"
#include "explow.h"
#include "stmt.h"
#include "expr.h"
#include "libfuncs.h"
#include "output.h"
#include "typeclass.h"
#include "langhooks.h"
#include "value-prof.h"
#include "builtins.h"
#include "stringpool.h"
#include "attribs.h"
#include "asan.h"
#include "internal-fn.h"
#include "case-cfn-macros.h"
#include "gimple-fold.h"
#include "intl.h"
#include "file-prefix-map.h" /* remap_macro_filename()  */
#include "gomp-constants.h"
#include "omp-general.h"
#include "tree-dfa.h"
#include "gimple-iterator.h"
#include "gimple-ssa.h"
#include "tree-ssa-live.h"
#include "tree-outof-ssa.h"
#include "attr-fnspec.h"
#include "demangle.h"

struct target_builtins default_target_builtins;
#if SWITCHABLE_TARGET
struct target_builtins *this_target_builtins = &default_target_builtins;
#endif

/* Define the names of the builtin function types and codes.  */
const char *const built_in_class_names[BUILT_IN_LAST]
  = {"NOT_BUILT_IN", "BUILT_IN_FRONTEND", "BUILT_IN_MD", "BUILT_IN_NORMAL"};

#define DEF_BUILTIN(X, N, C, T, LT, B, F, NA, AT, IM, COND) #X,
const char * built_in_names[(int) END_BUILTINS] =
{
#include "builtins.def"
};
#undef DEF_BUILTIN

/* Set up an array of builtin_info_type, making sure each element's
   decl is initialized to NULL_TREE.  */
builtin_info_type builtin_info[(int)END_BUILTINS];

/* Non-zero if __builtin_constant_p should be folded right away.  */
bool force_folding_builtin_constant_p;

static int target_char_cast (tree, char *);
static rtx get_memory_rtx (tree, tree);
static int apply_args_size (void);
static int apply_result_size (void);
static rtx result_vector (int, rtx);
static void expand_builtin_prefetch (tree);
static rtx expand_builtin_apply_args (void);
static rtx expand_builtin_apply_args_1 (void);
static rtx expand_builtin_apply (rtx, rtx, rtx);
static void expand_builtin_return (rtx);
static enum type_class type_to_class (tree);
static rtx expand_builtin_classify_type (tree);
static rtx expand_builtin_mathfn_3 (tree, rtx, rtx);
static rtx expand_builtin_mathfn_ternary (tree, rtx, rtx);
static rtx expand_builtin_interclass_mathfn (tree, rtx);
static rtx expand_builtin_sincos (tree);
static rtx expand_builtin_cexpi (tree, rtx);
static rtx expand_builtin_int_roundingfn (tree, rtx);
static rtx expand_builtin_int_roundingfn_2 (tree, rtx);
static rtx expand_builtin_next_arg (void);
static rtx expand_builtin_va_start (tree);
static rtx expand_builtin_va_end (tree);
static rtx expand_builtin_va_copy (tree);
static rtx inline_expand_builtin_bytecmp (tree, rtx);
static rtx expand_builtin_strcmp (tree, rtx);
static rtx expand_builtin_strncmp (tree, rtx, machine_mode);
static rtx expand_builtin_memchr (tree, rtx);
static rtx expand_builtin_memcpy (tree, rtx);
static rtx expand_builtin_memory_copy_args (tree dest, tree src, tree len,
					    rtx target, tree exp,
					    memop_ret retmode,
					    bool might_overlap);
static rtx expand_builtin_memmove (tree, rtx);
static rtx expand_builtin_mempcpy (tree, rtx);
static rtx expand_builtin_mempcpy_args (tree, tree, tree, rtx, tree, memop_ret);
static rtx expand_builtin_strcat (tree);
static rtx expand_builtin_strcpy (tree, rtx);
static rtx expand_builtin_strcpy_args (tree, tree, tree, rtx);
static rtx expand_builtin_stpcpy (tree, rtx, machine_mode);
static rtx expand_builtin_stpncpy (tree, rtx);
static rtx expand_builtin_strncat (tree, rtx);
static rtx expand_builtin_strncpy (tree, rtx);
static rtx expand_builtin_memset (tree, rtx, machine_mode);
static rtx expand_builtin_memset_args (tree, tree, tree, rtx, machine_mode, tree);
static rtx expand_builtin_bzero (tree);
static rtx expand_builtin_strlen (tree, rtx, machine_mode);
static rtx expand_builtin_strnlen (tree, rtx, machine_mode);
static rtx expand_builtin_alloca (tree);
static rtx expand_builtin_unop (machine_mode, tree, rtx, rtx, optab);
static rtx expand_builtin_frame_address (tree, tree);
static tree stabilize_va_list_loc (location_t, tree, int);
static rtx expand_builtin_expect (tree, rtx);
static rtx expand_builtin_expect_with_probability (tree, rtx);
static tree fold_builtin_constant_p (tree);
static tree fold_builtin_classify_type (tree);
static tree fold_builtin_strlen (location_t, tree, tree, tree);
static tree fold_builtin_inf (location_t, tree, int);
static tree rewrite_call_expr (location_t, tree, int, tree, int, ...);
static bool validate_arg (const_tree, enum tree_code code);
static rtx expand_builtin_fabs (tree, rtx, rtx);
static rtx expand_builtin_signbit (tree, rtx);
static tree fold_builtin_memcmp (location_t, tree, tree, tree);
static tree fold_builtin_isascii (location_t, tree);
static tree fold_builtin_toascii (location_t, tree);
static tree fold_builtin_isdigit (location_t, tree);
static tree fold_builtin_fabs (location_t, tree, tree);
static tree fold_builtin_abs (location_t, tree, tree);
static tree fold_builtin_unordered_cmp (location_t, tree, tree, tree,
					enum tree_code, enum tree_code);
static tree fold_builtin_varargs (location_t, tree, tree*, int);

static tree fold_builtin_strpbrk (location_t, tree, tree, tree, tree);
static tree fold_builtin_strspn (location_t, tree, tree, tree);
static tree fold_builtin_strcspn (location_t, tree, tree, tree);

static rtx expand_builtin_object_size (tree);
static rtx expand_builtin_memory_chk (tree, rtx, machine_mode,
				      enum built_in_function);
static void maybe_emit_chk_warning (tree, enum built_in_function);
static void maybe_emit_sprintf_chk_warning (tree, enum built_in_function);
static tree fold_builtin_object_size (tree, tree);
static bool check_read_access (tree, tree, tree = NULL_TREE, int = 1);
static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
			       pointer_query *);

unsigned HOST_WIDE_INT target_newline;
unsigned HOST_WIDE_INT target_percent;
static unsigned HOST_WIDE_INT target_c;
static unsigned HOST_WIDE_INT target_s;
char target_percent_c[3];
char target_percent_s[3];
char target_percent_s_newline[4];
static tree do_mpfr_remquo (tree, tree, tree);
static tree do_mpfr_lgamma_r (tree, tree, tree);
static void expand_builtin_sync_synchronize (void);

access_ref::access_ref (tree bound /* = NULL_TREE */,
			bool minaccess /* = false */)
: ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
  base0 (true), parmarray ()
{
  /* Set to valid.  */
  offrng[0] = offrng[1] = 0;
  /* Invalidate.  */
  sizrng[0] = sizrng[1] = -1;

  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}

/* Return the PHI node REF refers to or null if it doesn't.  */

gphi *
access_ref::phi () const
{
  if (!ref || TREE_CODE (ref) != SSA_NAME)
    return NULL;

  gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
  if (gimple_code (def_stmt) != GIMPLE_PHI)
    return NULL;

  return as_a <gphi *> (def_stmt);
}

/* Determine and return the largest object to which *THIS refers.  If
   *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
   of the object determined by compute_objsize(ARG, OSTYPE) for each
   PHI argument ARG.  */

tree
access_ref::get_ref (vec<access_ref> *all_refs,
		     access_ref *pref /* = NULL */,
		     int ostype /* = 1 */,
		     ssa_name_limit_t *psnlim /* = NULL */,
		     pointer_query *qry /* = NULL */) const
{
  gphi *phi_stmt = this->phi ();
  if (!phi_stmt)
    return ref;

  /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
     cause unbounded recursion.  */
  ssa_name_limit_t snlim_buf;
  if (!psnlim)
    psnlim = &snlim_buf;

  if (!psnlim->visit_phi (ref))
    return NULL_TREE;

  /* Reflects the range of offsets when all the PHI arguments refer to
     the same object (i.e., have the same REF).  */
  access_ref same_ref;
  /* The conservative result of the PHI reflecting the offset and size
     of the largest PHI argument, regardless of whether or not they all
     refer to the same object.  */
  pointer_query empty_qry;
  if (!qry)
    qry = &empty_qry;

  access_ref phi_ref;
  if (pref)
    {
      phi_ref = *pref;
      same_ref = *pref;
    }

  /* Set if any argument is a function array (or VLA) parameter not
     declared [static].  */
  bool parmarray = false;
  /* The size of the smallest object referenced by the PHI arguments.  */
  offset_int minsize = 0;
  const offset_int maxobjsize = wi::to_offset (max_object_size ());
  /* The offset of the PHI, not reflecting those of its arguments.  */
  const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };

  const unsigned nargs = gimple_phi_num_args (phi_stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      access_ref phi_arg_ref;
      tree arg = gimple_phi_arg_def (phi_stmt, i);
      if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
	  || phi_arg_ref.sizrng[0] < 0)
	/* A PHI with all null pointer arguments.  */
	return NULL_TREE;

      /* Add PREF's offset to that of the argument.  */
      phi_arg_ref.add_offset (orng[0], orng[1]);
      if (TREE_CODE (arg) == SSA_NAME)
	qry->put_ref (arg, phi_arg_ref);

      if (all_refs)
	all_refs->safe_push (phi_arg_ref);

      const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
				   || phi_arg_ref.sizrng[1] != maxobjsize);

      parmarray |= phi_arg_ref.parmarray;

      const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);

      if (phi_ref.sizrng[0] < 0)
	{
	  if (!nullp)
	    same_ref = phi_arg_ref;
	  phi_ref = phi_arg_ref;
	  if (arg_known_size)
	    minsize = phi_arg_ref.sizrng[0];
	  continue;
	}

      const bool phi_known_size = (phi_ref.sizrng[0] != 0
				   || phi_ref.sizrng[1] != maxobjsize);

      if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
	minsize = phi_arg_ref.sizrng[0];

      /* Disregard null pointers in PHIs with two or more arguments.
	 TODO: Handle this better!  */
      if (nullp)
	continue;

      /* Determine the amount of remaining space in the argument.  */
      offset_int argrem[2];
      argrem[1] = phi_arg_ref.size_remaining (argrem);

      /* Determine the amount of remaining space computed so far and
	 if the remaining space in the argument is more use it instead.  */
      offset_int phirem[2];
      phirem[1] = phi_ref.size_remaining (phirem);

      if (phi_arg_ref.ref != same_ref.ref)
	same_ref.ref = NULL_TREE;

      if (phirem[1] < argrem[1]
	  || (phirem[1] == argrem[1]
	      && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
	/* Use the argument with the most space remaining as the result,
	   or the larger one if the space is equal.  */
	phi_ref = phi_arg_ref;

      /* Set SAME_REF.OFFRNG to the maximum range of all arguments.  */
      if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
	same_ref.offrng[0] = phi_arg_ref.offrng[0];
      if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
	same_ref.offrng[1] = phi_arg_ref.offrng[1];
    }

  if (phi_ref.sizrng[0] < 0)
    {
      /* Fail if none of the PHI's arguments resulted in updating PHI_REF
	 (perhaps because they have all been already visited by prior
	 recursive calls).  */
      psnlim->leave_phi (ref);
      return NULL_TREE;
    }

  if (!same_ref.ref && same_ref.offrng[0] != 0)
    /* Clear BASE0 if not all the arguments refer to the same object and
       if not all their offsets are zero-based.  This allows the final
       PHI offset to be out of bounds for some arguments but not for
       others (or negative even if all the arguments are BASE0), which
       is overly permissive.  */
    phi_ref.base0 = false;

  if (same_ref.ref)
    phi_ref = same_ref;
  else
    {
      /* Replace the lower bound of the largest argument with the size
	 of the smallest argument, and set PARMARRAY if any argument
	 was one.  */
      phi_ref.sizrng[0] = minsize;
      phi_ref.parmarray = parmarray;
    }

  /* Avoid changing *THIS.  */
  if (pref && pref != this)
    *pref = phi_ref;

  psnlim->leave_phi (ref);

  return phi_ref.ref;
}

/* Return the maximum amount of space remaining and if non-null, set
   argument to the minimum.  */

offset_int
access_ref::size_remaining (offset_int *pmin /* = NULL */) const
{
  offset_int minbuf;
  if (!pmin)
    pmin = &minbuf;

  /* add_offset() ensures the offset range isn't inverted.  */
  gcc_checking_assert (offrng[0] <= offrng[1]);

  if (base0)
    {
      /* The offset into the referenced object is zero-based (i.e., it's
	 not referenced by a pointer into the middle of some unknown
	 object).  */
      if (offrng[0] < 0 && offrng[1] < 0)
	{
	  /* If the offset is negative the remaining size is zero.  */
	  *pmin = 0;
	  return 0;
	}

      if (sizrng[1] <= offrng[0])
	{
	  /* If the starting offset is greater than or equal to the upper
	     bound on the size of the object, the space remaining is zero.
	     As a special case, if it's equal, set *PMIN to -1 to let
	     the caller know the offset is valid and just past the end.  */
	  *pmin = sizrng[1] == offrng[0] ? -1 : 0;
	  return 0;
	}

      /* Otherwise return the size minus the lower bound of the offset.  */
      offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

      *pmin = sizrng[0] - or0;
      return sizrng[1] - or0;
    }

  /* The offset to the referenced object isn't zero-based (i.e., it may
     refer to a byte other than the first).  The size of such an object
     is constrained only by the size of the address space (the result
     of max_object_size()).  */
  if (sizrng[1] <= offrng[0])
    {
      *pmin = 0;
      return 0;
    }

  offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

  *pmin = sizrng[0] - or0;
  return sizrng[1] - or0;
}
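
/* A sketch of the arithmetic above, assuming a zero-based (BASE0)
   reference to an 8-byte object: with SIZRNG = [8, 8] and
   OFFRNG = [2, 2] the call returns 6 and sets *PMIN to 6; with
   OFFRNG = [8, 8] it returns 0 and sets *PMIN to -1 to flag an
   offset that is valid but just past the end; and with
   OFFRNG = [9, 9] it returns 0 and sets *PMIN to 0.  */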

/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
	 expand it to the maximum.  */
      add_max_offset ();
      return;
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
	 the upper bound to the maximum representable offset value
	 (which may be greater than MAX_OBJECT_SIZE).
	 The lower bound is either the sum of the current offset and
	 MIN when abs(MAX) is greater than the former, or zero otherwise.
	 Zero because then the inverted range includes the negative of
	 the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
	{
	  offrng[0] = 0;
	  return;
	}

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
	{
	  offrng[0] += min;
	  /* Cap the lower bound at the upper (set to MAXOFF above)
	     to avoid inadvertently recreating an inverted range.  */
	  if (offrng[1] < offrng[0])
	    offrng[0] = offrng[1];
	}
      else
	offrng[0] = 0;
    }

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
	 the object.  Out of bounds offsets are left unchanged, and, for
	 better or worse, become in bounds later.  They should be detected
	 and diagnosed at the point they first become invalid by
	 -Warray-bounds.  */
      if (offrng[0] < 0)
	offrng[0] = 0;
      if (offrng[1] > sizrng[1])
	offrng[1] = sizrng[1];
    }
}
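
/* To illustrate the clamping above: for a BASE0 reference to an
   8-byte object with OFFRNG = [0, 4], add_offset (2, 6) first
   yields OFFRNG = [2, 10]; since some space remains in bounds,
   the upper bound is then constrained to SIZRNG[1], leaving
   OFFRNG = [2, 8].  */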

/* Set a bit for the PHI in VISITED and return true if it wasn't
   already set.  */

bool
ssa_name_limit_t::visit_phi (tree ssa_name)
{
  if (!visited)
    visited = BITMAP_ALLOC (NULL);

  /* Return false if SSA_NAME has already been visited.  */
  return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
}

/* Clear a bit for the PHI in VISITED.  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* Clear the bit for SSA_NAME so it may be visited again.  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}
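
/* Callers bracket the traversal of each PHI with these two calls:
   visit_phi returning false means the SSA_NAME is already on the
   current recursion path (a cycle), and leave_phi must be called on
   the way back out so that sibling paths may visit the same PHI
   again, as access_ref::get_ref above does.  */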

/* Return false if the SSA_NAME chain length counter has reached
   the limit, otherwise increment the counter and return true.  */

bool
ssa_name_limit_t::next ()
{
  /* Return false to let the caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return false;

  --ssa_def_max;
  return true;
}

/* If the SSA_NAME has already been "seen" return a positive value.
   Otherwise add it to VISITED.  If the SSA_NAME limit has been
   reached, return a negative value.  Otherwise return zero.  */

int
ssa_name_limit_t::next_phi (tree ssa_name)
{
  {
    gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
    /* Return a positive value if the PHI has already been visited.  */
    if (gimple_code (def_stmt) == GIMPLE_PHI
	&& !visit_phi (ssa_name))
      return 1;
  }

  /* Return a negative value to let caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return -1;

  --ssa_def_max;

  return 0;
}

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}

/* Default ctor.  Initialize object with pointers to the range_query
   and cache_type instances to use or null.  */

pointer_query::pointer_query (range_query *qry /* = NULL */,
			      cache_type *cache /* = NULL */)
: rvals (qry), var_cache (cache), hits (), misses (),
  failures (), depth (), max_depth ()
{
  /* No op.  */
}

/* Return a pointer to the cached access_ref instance for the SSA_NAME
   PTR if it's there or null otherwise.  */

const access_ref *
pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
{
  if (!var_cache)
    {
      ++misses;
      return NULL;
    }

  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);
  if (var_cache->indices.length () <= idx)
    {
      ++misses;
      return NULL;
    }

  unsigned cache_idx = var_cache->indices[idx];
  if (var_cache->access_refs.length () <= cache_idx)
    {
      ++misses;
      return NULL;
    }

  access_ref &cache_ref = var_cache->access_refs[cache_idx];
  if (cache_ref.ref)
    {
      ++hits;
      return &cache_ref;
    }

  ++misses;
  return NULL;
}

/* Retrieve the access_ref instance for a variable from the cache if it's
   there or compute it and insert it into the cache if it's nonnull.  */

bool
pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
{
  const unsigned version
    = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;

  if (var_cache && version)
    {
      unsigned idx = version << 1 | (ostype & 1);
      if (idx < var_cache->indices.length ())
	{
	  unsigned cache_idx = var_cache->indices[idx] - 1;
	  if (cache_idx < var_cache->access_refs.length ()
	      && var_cache->access_refs[cache_idx].ref)
	    {
	      ++hits;
	      *pref = var_cache->access_refs[cache_idx];
	      return true;
	    }
	}

      ++misses;
    }

  if (!compute_objsize (ptr, ostype, pref, this))
    {
      ++failures;
      return false;
    }

  return true;
}

/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
   nonnull.  */

void
pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
{
  /* Only add populated/valid entries.  */
  if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
    return;

  /* Add REF to the two-level cache.  */
  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);

  /* Grow INDICES if necessary.  An index is valid if it's nonzero.
     Its value minus one is the index into ACCESS_REFS.  Not all
     entries are valid.  */
  if (var_cache->indices.length () <= idx)
    var_cache->indices.safe_grow_cleared (idx + 1);

  if (!var_cache->indices[idx])
    var_cache->indices[idx] = var_cache->access_refs.length () + 1;

  /* Grow ACCESS_REF cache if necessary.  An entry is valid if its
     REF member is nonnull.  All entries except for the last two
     are valid.  Once nonnull, the REF value must stay unchanged.  */
  unsigned cache_idx = var_cache->indices[idx];
  if (var_cache->access_refs.length () <= cache_idx)
    var_cache->access_refs.safe_grow_cleared (cache_idx + 1);

  /* Bind a reference to the cache slot so the store below updates
     the cache rather than a local copy.  */
  access_ref &cache_ref = var_cache->access_refs[cache_idx - 1];
  if (cache_ref.ref)
    {
      gcc_checking_assert (cache_ref.ref == ref.ref);
      return;
    }

  cache_ref = ref;
}
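
/* To illustrate the two-level indexing above: for SSA_NAME version 5
   queried with OSTYPE 1, IDX is (5 << 1 | 1) == 11; INDICES[11], when
   nonzero, is one greater than the slot in ACCESS_REFS holding the
   corresponding access_ref, so a stored value of 7 refers to
   ACCESS_REFS[6].  */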

/* Flush the cache if it's nonnull.  */

void
pointer_query::flush_cache ()
{
  if (!var_cache)
    return;
  var_cache->indices.release ();
  var_cache->access_refs.release ();
}

/* Return true if NAME starts with __builtin_, __sync_, or __atomic_.  */

static bool
is_builtin_name (const char *name)
{
  if (strncmp (name, "__builtin_", 10) == 0)
    return true;
  if (strncmp (name, "__sync_", 7) == 0)
    return true;
  if (strncmp (name, "__atomic_", 9) == 0)
    return true;
  return false;
}
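
/* For example, is_builtin_name ("__builtin_memcpy") and
   is_builtin_name ("__atomic_load_n") return true, while
   is_builtin_name ("memcpy") returns false.  */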

/* Return true if NODE should be considered for inline expansion regardless
   of the optimization level.  This means whenever a function is invoked with
   its "internal" name, which normally contains the prefix "__builtin".  */

bool
called_as_built_in (tree node)
{
  /* Note that we must use DECL_NAME, not DECL_ASSEMBLER_NAME_SET_P since
     we want the name used to call the function, not the name it
     will have.  */
  const char *name = IDENTIFIER_POINTER (DECL_NAME (node));
  return is_builtin_name (name);
}

/* Compute values M and N such that M divides (address of EXP - N) and such
   that N < M.  If these numbers can be determined, store M in *ALIGNP and
   N in *BITPOSP and return true.  Otherwise return false and store
   BITS_PER_UNIT to *ALIGNP and any bit-offset to *BITPOSP.

   Note that the address (and thus the alignment) computed here is based
   on the address to which a symbol resolves, whereas DECL_ALIGN is based
   on the address at which an object is actually located.  These two
   addresses are not always the same.  For example, on ARM targets,
   the address &foo of a Thumb function foo() has the lowest bit set,
   whereas foo() itself starts on an even address.

   If ADDR_P is true we are taking the address of the memory reference EXP
   and thus cannot rely on the access taking place.  */

static bool
get_object_alignment_2 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp, bool addr_p)
{
  poly_int64 bitsize, bitpos;
  tree offset;
  machine_mode mode;
  int unsignedp, reversep, volatilep;
  unsigned int align = BITS_PER_UNIT;
  bool known_alignment = false;

  /* Get the innermost object and the constant (bitpos) and possibly
     variable (offset) offset of the access.  */
  exp = get_inner_reference (exp, &bitsize, &bitpos, &offset, &mode,
			     &unsignedp, &reversep, &volatilep);

  /* Extract alignment information from the innermost object and
     possibly adjust bitpos and offset.  */
  if (TREE_CODE (exp) == FUNCTION_DECL)
    {
      /* Function addresses can encode extra information besides their
	 alignment.  However, if TARGET_PTRMEMFUNC_VBIT_LOCATION
	 allows the low bit to be used as a virtual bit, we know
	 that the address itself must be at least 2-byte aligned.  */
      if (TARGET_PTRMEMFUNC_VBIT_LOCATION == ptrmemfunc_vbit_in_pfn)
	align = 2 * BITS_PER_UNIT;
    }
  else if (TREE_CODE (exp) == LABEL_DECL)
    ;
  else if (TREE_CODE (exp) == CONST_DECL)
    {
      /* The alignment of a CONST_DECL is determined by its initializer.  */
      exp = DECL_INITIAL (exp);
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }
  else if (DECL_P (exp))
    {
      align = DECL_ALIGN (exp);
      known_alignment = true;
    }
  else if (TREE_CODE (exp) == INDIRECT_REF
	   || TREE_CODE (exp) == MEM_REF
	   || TREE_CODE (exp) == TARGET_MEM_REF)
    {
      tree addr = TREE_OPERAND (exp, 0);
      unsigned ptr_align;
      unsigned HOST_WIDE_INT ptr_bitpos;
      unsigned HOST_WIDE_INT ptr_bitmask = ~0;

      /* If the address is explicitly aligned, handle that.  */
      if (TREE_CODE (addr) == BIT_AND_EXPR
	  && TREE_CODE (TREE_OPERAND (addr, 1)) == INTEGER_CST)
	{
	  ptr_bitmask = TREE_INT_CST_LOW (TREE_OPERAND (addr, 1));
	  ptr_bitmask *= BITS_PER_UNIT;
	  align = least_bit_hwi (ptr_bitmask);
	  addr = TREE_OPERAND (addr, 0);
	}

      known_alignment
	= get_pointer_alignment_1 (addr, &ptr_align, &ptr_bitpos);
      align = MAX (ptr_align, align);

      /* Re-apply explicit alignment to the bitpos.  */
      ptr_bitpos &= ptr_bitmask;

      /* The alignment of the pointer operand in a TARGET_MEM_REF
	 has to take the variable offset parts into account.  */
      if (TREE_CODE (exp) == TARGET_MEM_REF)
	{
	  if (TMR_INDEX (exp))
	    {
	      unsigned HOST_WIDE_INT step = 1;
	      if (TMR_STEP (exp))
		step = TREE_INT_CST_LOW (TMR_STEP (exp));
	      align = MIN (align, least_bit_hwi (step) * BITS_PER_UNIT);
	    }
	  if (TMR_INDEX2 (exp))
	    align = BITS_PER_UNIT;
	  known_alignment = false;
	}

      /* When EXP is an actual memory reference then we can use
	 TYPE_ALIGN of a pointer indirection to derive alignment.
	 Do so only if get_pointer_alignment_1 did not reveal absolute
	 alignment knowledge and if using that alignment would
	 improve the situation.  */
      unsigned int talign;
      if (!addr_p && !known_alignment
	  && (talign = min_align_of_type (TREE_TYPE (exp)) * BITS_PER_UNIT)
	  && talign > align)
	align = talign;
      else
	{
	  /* Else adjust bitpos accordingly.  */
	  bitpos += ptr_bitpos;
	  if (TREE_CODE (exp) == MEM_REF
	      || TREE_CODE (exp) == TARGET_MEM_REF)
	    bitpos += mem_ref_offset (exp).force_shwi () * BITS_PER_UNIT;
	}
    }
  else if (TREE_CODE (exp) == STRING_CST)
    {
      /* STRING_CST are the only constant objects we allow to be not
	 wrapped inside a CONST_DECL.  */
      align = TYPE_ALIGN (TREE_TYPE (exp));
      if (CONSTANT_CLASS_P (exp))
	align = targetm.constant_alignment (exp, align);

      known_alignment = true;
    }

  /* If there is a non-constant offset part extract the maximum
     alignment that can prevail.  */
  if (offset)
    {
      unsigned int trailing_zeros = tree_ctz (offset);
      if (trailing_zeros < HOST_BITS_PER_INT)
	{
	  unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	  if (inner)
	    align = MIN (align, inner);
	}
    }

  /* Account for the alignment of runtime coefficients, so that the constant
     bitpos is guaranteed to be accurate.  */
  unsigned int alt_align = ::known_alignment (bitpos - bitpos.coeffs[0]);
  if (alt_align != 0 && alt_align < align)
    {
      align = alt_align;
      known_alignment = false;
    }

  *alignp = align;
  *bitposp = bitpos.coeffs[0] & (align - 1);
  return known_alignment;
}

/* For a memory reference expression EXP compute values M and N such that M
   divides (&EXP - N) and such that N < M.  If these numbers can be
   determined, store M in *ALIGNP and N in *BITPOSP and return true.
   Otherwise return false and store BITS_PER_UNIT to *ALIGNP and any
   bit-offset to *BITPOSP.  */

bool
get_object_alignment_1 (tree exp, unsigned int *alignp,
			unsigned HOST_WIDE_INT *bitposp)
{
  return get_object_alignment_2 (exp, alignp, bitposp, false);
}

/* Return the alignment in bits of EXP, an object.  */

unsigned int
get_object_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_object_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);
  return align;
}
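
/* For example, if the call above determines ALIGN == 32 and
   BITPOS == 8 (the object is known to sit one byte past a four-byte
   boundary), the alignment that can be guaranteed is only
   least_bit_hwi (8), i.e. 8 bits.  */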

/* For a pointer valued expression EXP compute values M and N such that M
   divides (EXP - N) and such that N < M.  If these numbers can be determined,
   store M in *ALIGNP and N in *BITPOSP and return true.  Return false if
   the results are just a conservative approximation.

   If EXP is not a pointer, false is returned too.  */

bool
get_pointer_alignment_1 (tree exp, unsigned int *alignp,
			 unsigned HOST_WIDE_INT *bitposp)
{
  STRIP_NOPS (exp);

  if (TREE_CODE (exp) == ADDR_EXPR)
    return get_object_alignment_2 (TREE_OPERAND (exp, 0),
				   alignp, bitposp, true);
  else if (TREE_CODE (exp) == POINTER_PLUS_EXPR)
    {
      unsigned int align;
      unsigned HOST_WIDE_INT bitpos;
      bool res = get_pointer_alignment_1 (TREE_OPERAND (exp, 0),
					  &align, &bitpos);
      if (TREE_CODE (TREE_OPERAND (exp, 1)) == INTEGER_CST)
	bitpos += TREE_INT_CST_LOW (TREE_OPERAND (exp, 1)) * BITS_PER_UNIT;
      else
	{
	  unsigned int trailing_zeros = tree_ctz (TREE_OPERAND (exp, 1));
	  if (trailing_zeros < HOST_BITS_PER_INT)
	    {
	      unsigned int inner = (1U << trailing_zeros) * BITS_PER_UNIT;
	      if (inner)
		align = MIN (align, inner);
	    }
	}
      *alignp = align;
      *bitposp = bitpos & (align - 1);
      return res;
    }
  else if (TREE_CODE (exp) == SSA_NAME
	   && POINTER_TYPE_P (TREE_TYPE (exp)))
    {
      unsigned int ptr_align, ptr_misalign;
      struct ptr_info_def *pi = SSA_NAME_PTR_INFO (exp);

      if (pi && get_ptr_info_alignment (pi, &ptr_align, &ptr_misalign))
	{
	  *bitposp = ptr_misalign * BITS_PER_UNIT;
	  *alignp = ptr_align * BITS_PER_UNIT;
	  /* Make sure to return a sensible alignment when the multiplication
	     by BITS_PER_UNIT overflowed.  */
	  if (*alignp == 0)
	    *alignp = 1u << (HOST_BITS_PER_INT - 1);
	  /* We cannot really tell whether this result is an approximation.  */
	  return false;
	}
      else
	{
	  *bitposp = 0;
	  *alignp = BITS_PER_UNIT;
	  return false;
	}
    }
  else if (TREE_CODE (exp) == INTEGER_CST)
    {
      *alignp = BIGGEST_ALIGNMENT;
      *bitposp = ((TREE_INT_CST_LOW (exp) * BITS_PER_UNIT)
		  & (BIGGEST_ALIGNMENT - 1));
      return true;
    }

  *bitposp = 0;
  *alignp = BITS_PER_UNIT;
  return false;
}

/* Return the alignment in bits of EXP, a pointer valued expression.
   The alignment returned is, by default, the alignment of the thing that
   EXP points to.  If it is not a POINTER_TYPE, 0 is returned.

   Otherwise, look at the expression to see if we can do better, i.e., if the
   expression is actually pointing at an object whose alignment is tighter.  */

unsigned int
get_pointer_alignment (tree exp)
{
  unsigned HOST_WIDE_INT bitpos = 0;
  unsigned int align;

  get_pointer_alignment_1 (exp, &align, &bitpos);

  /* align and bitpos now specify known low bits of the pointer.
     ptr & (align - 1) == bitpos.  */

  if (bitpos != 0)
    align = least_bit_hwi (bitpos);

  return align;
}
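
/* For instance, for `(char *) &i + 1' where `i' is an int with 32-bit
   alignment, the recursion above yields ALIGN == 32 and BITPOS == 8,
   so the guaranteed alignment drops to least_bit_hwi (8) == 8 bits,
   i.e. one byte.  */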

/* Return the number of leading non-zero elements in the sequence
   [ PTR, PTR + MAXELTS ) where each element's size is ELTSIZE bytes.
   ELTSIZE must be a power of 2 less than 8.  Used by c_strlen.  */

unsigned
string_length (const void *ptr, unsigned eltsize, unsigned maxelts)
{
  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  unsigned n;

  if (eltsize == 1)
    {
      /* Optimize the common case of plain char.  */
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n;
	  if (!*elt)
	    break;
	}
    }
  else
    {
      for (n = 0; n < maxelts; n++)
	{
	  const char *elt = (const char*) ptr + n * eltsize;
	  if (!memcmp (elt, "\0\0\0\0", eltsize))
	    break;
	}
    }
  return n;
}
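
/* For example, with PTR pointing at the bytes of "foo\0bar",
   ELTSIZE 1, and MAXELTS 8, the loop above stops at the embedded
   NUL and returns 3 even though non-NUL bytes follow it.  */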

/* For a call EXPR at LOC to a function FNAME that expects a string
   in the argument ARG, issue a diagnostic due to it being called
   with an argument that is a character array with no terminating
   NUL.  SIZE is the EXACT size of the array, and BNDRNG the number
   of characters in which the NUL is expected.  Either EXPR or FNAME
   may be null but not both.  SIZE may be null when BNDRNG is null.  */

void
warn_string_no_nul (location_t loc, tree expr, const char *fname,
		    tree arg, tree decl, tree size /* = NULL_TREE */,
		    bool exact /* = false */,
		    const wide_int bndrng[2] /* = NULL */)
{
  if ((expr && TREE_NO_WARNING (expr)) || TREE_NO_WARNING (arg))
    return;

  loc = expansion_point_location_if_in_system_header (loc);
  bool warned;

  /* Format the bound range as a string to keep the number of messages
     from exploding.  */
  char bndstr[80];
  *bndstr = 0;
  if (bndrng)
    {
      if (bndrng[0] == bndrng[1])
	sprintf (bndstr, "%llu", (unsigned long long) bndrng[0].to_uhwi ());
      else
	sprintf (bndstr, "[%llu, %llu]",
		 (unsigned long long) bndrng[0].to_uhwi (),
		 (unsigned long long) bndrng[1].to_uhwi ());
    }

  const tree maxobjsize = max_object_size ();
  const wide_int maxsiz = wi::to_wide (maxobjsize);
  if (expr)
    {
      tree func = get_callee_fndecl (expr);
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%K%qD specified bound %s exceeds "
				 "maximum object size %E",
				 expr, func, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%K%qD specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%K%qD specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%K%qD specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   expr, func, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%K%qD argument missing terminating nul",
			     expr, func);
    }
  else
    {
      if (bndrng)
	{
	  if (wi::ltu_p (maxsiz, bndrng[0]))
	    warned = warning_at (loc, OPT_Wstringop_overread,
				 "%qs specified bound %s exceeds "
				 "maximum object size %E",
				 fname, bndstr, maxobjsize);
	  else
	    {
	      bool maybe = wi::to_wide (size) == bndrng[0];
	      warned = warning_at (loc, OPT_Wstringop_overread,
				   exact
				   ? G_("%qs specified bound %s exceeds "
					"the size %E of unterminated array")
				   : (maybe
				      ? G_("%qs specified bound %s may "
					   "exceed the size of at most %E "
					   "of unterminated array")
				      : G_("%qs specified bound %s exceeds "
					   "the size of at most %E "
					   "of unterminated array")),
				   fname, bndstr, size);
	    }
	}
      else
	warned = warning_at (loc, OPT_Wstringop_overread,
			     "%qs argument missing terminating nul",
			     fname);
    }

  if (warned)
    {
      inform (DECL_SOURCE_LOCATION (decl),
	      "referenced argument declared here");
      TREE_NO_WARNING (arg) = 1;
      if (expr)
	TREE_NO_WARNING (expr) = 1;
    }
}

/* For a call EXPR (which may be null) that expects a string argument
   SRC, returns false if SRC is a character array with no terminating
   NUL.  When nonnull, BOUND is the number of characters in which to
   expect the terminating NUL.  When EXPR is nonnull, also issues
   a warning.  */

bool
check_nul_terminated_array (tree expr, tree src,
			    tree bound /* = NULL_TREE */)
{
  /* The constant size of the array SRC points to.  The actual size
     may be less if EXACT is false, but not more.  */
  tree size;
  /* False if SRC involves a non-constant offset into the array.  */
  bool exact;
  /* The unterminated constant array SRC points to.  */
  tree nonstr = unterminated_array (src, &size, &exact);
  if (!nonstr)
    return true;

  /* NONSTR refers to the non-nul terminated constant array and SIZE
     is the constant size of the array in bytes.  EXACT is true when
     SIZE is exact.  */

  wide_int bndrng[2];
  if (bound)
    {
      if (TREE_CODE (bound) == INTEGER_CST)
	bndrng[0] = bndrng[1] = wi::to_wide (bound);
      else
	{
	  value_range_kind rng = get_range_info (bound, bndrng, bndrng + 1);
	  if (rng != VR_RANGE)
	    return true;
	}

      if (exact)
	{
	  if (wi::leu_p (bndrng[0], wi::to_wide (size)))
	    return true;
	}
      else if (wi::lt_p (bndrng[0], wi::to_wide (size), UNSIGNED))
	return true;
    }

  if (expr)
    warn_string_no_nul (EXPR_LOCATION (expr), expr, NULL, src, nonstr,
			size, exact, bound ? bndrng : NULL);

  return false;
}

/* If EXP refers to an unterminated constant character array return
   the declaration of the object of which the array is a member or
   element and if SIZE is not null, set *SIZE to the size of
   the unterminated array and set *EXACT if the size is exact or
   clear it otherwise.  Otherwise return null.  */

tree
unterminated_array (tree exp, tree *size /* = NULL */, bool *exact /* = NULL */)
{
  /* C_STRLEN will return NULL and set DECL in the info
     structure if EXP references an unterminated array.  */
  c_strlen_data lendata = { };
  tree len = c_strlen (exp, 1, &lendata);
  if (len == NULL_TREE && lendata.minlen && lendata.decl)
    {
      if (size)
	{
	  len = lendata.minlen;
	  if (lendata.off)
	    {
	      /* Constant offsets are already accounted for in LENDATA.MINLEN,
		 but not in a SSA_NAME + CST expression.  */
	      if (TREE_CODE (lendata.off) == INTEGER_CST)
		*exact = true;
	      else if (TREE_CODE (lendata.off) == PLUS_EXPR
		       && TREE_CODE (TREE_OPERAND (lendata.off, 1)) == INTEGER_CST)
		{
		  /* Subtract the offset from the size of the array.  */
		  *exact = false;
		  tree temp = TREE_OPERAND (lendata.off, 1);
		  temp = fold_convert (ssizetype, temp);
		  len = fold_build2 (MINUS_EXPR, ssizetype, len, temp);
		}
	      else
		*exact = false;
	    }
	  else
	    *exact = true;

	  *size = len;
	}
      return lendata.decl;
    }

  return NULL_TREE;
}
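
/* As an example, given

     const char a[4] = "abcd";

   which leaves no room for a terminating NUL,
   unterminated_array (&a[0], &size, &exact) returns the declaration
   of `a' and sets *SIZE to 4 and *EXACT to true.  */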

/* Compute the length of a null-terminated character string or wide
   character string handling character sizes of 1, 2, and 4 bytes.
   TREE_STRING_LENGTH is not the right way because it evaluates to
   the size of the character array in bytes (as opposed to characters)
   and because it can contain a zero byte in the middle.

   ONLY_VALUE should be nonzero if the result is not going to be emitted
   into the instruction stream and zero if it is going to be expanded.
   E.g. with i++ ? "foo" : "bar", if ONLY_VALUE is nonzero, constant 3
   is returned, otherwise NULL, since
   len = c_strlen (ARG, 1); if (len) expand_expr (len, ...); would not
   evaluate the side-effects.

   If ONLY_VALUE is two then we do not emit warnings about out-of-bound
   accesses.  Note that this implies the result is not going to be emitted
   into the instruction stream.

   Additional information about the string accessed may be recorded
   in DATA.  For example, if ARG references an unterminated string,
   then the declaration will be stored in the DECL field.  If the
   length of the unterminated string can be determined, it'll be
   stored in the LEN field.  Note this length could well be different
   than what a C strlen call would return.

   ELTSIZE is 1 for normal single byte character strings, and 2 or
   4 for wide character strings.  ELTSIZE is by default 1.

   The value returned is of type `ssizetype'.  */

tree
c_strlen (tree arg, int only_value, c_strlen_data *data, unsigned eltsize)
{
  /* If we were not passed a DATA pointer, then get one to a local
     structure.  That avoids having to check DATA for NULL before
     each time we want to use it.  */
  c_strlen_data local_strlen_data = { };
  if (!data)
    data = &local_strlen_data;

  gcc_checking_assert (eltsize == 1 || eltsize == 2 || eltsize == 4);

  tree src = STRIP_NOPS (arg);
  if (TREE_CODE (src) == COND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    {
      tree len1, len2;

      len1 = c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);
      len2 = c_strlen (TREE_OPERAND (src, 2), only_value, data, eltsize);
      if (tree_int_cst_equal (len1, len2))
	return len1;
    }

  if (TREE_CODE (src) == COMPOUND_EXPR
      && (only_value || !TREE_SIDE_EFFECTS (TREE_OPERAND (src, 0))))
    return c_strlen (TREE_OPERAND (src, 1), only_value, data, eltsize);

  location_t loc = EXPR_LOC_OR_LOC (src, input_location);

  /* Offset from the beginning of the string in bytes.  */
  tree byteoff;
  tree memsize;
  tree decl;
  src = string_constant (src, &byteoff, &memsize, &decl);
  if (src == 0)
    return NULL_TREE;

  /* Determine the size of the string element.  */
  if (eltsize != tree_to_uhwi (TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (src)))))
    return NULL_TREE;

  /* Set MAXELTS to sizeof (SRC) / sizeof (*SRC) - 1, the maximum possible
     length of SRC.  Prefer TYPE_SIZE() to TREE_STRING_LENGTH() if possible
     in case the latter is less than the size of the array, such as when
     SRC refers to a short string literal used to initialize a large array.
     In that case, the elements of the array after the terminating NUL are
     all NUL.  */
  HOST_WIDE_INT strelts = TREE_STRING_LENGTH (src);
  strelts = strelts / eltsize;

  if (!tree_fits_uhwi_p (memsize))
    return NULL_TREE;

  HOST_WIDE_INT maxelts = tree_to_uhwi (memsize) / eltsize;

  /* PTR can point to the byte representation of any string type, including
     char* and wchar_t*.  */
  const char *ptr = TREE_STRING_POINTER (src);

  if (byteoff && TREE_CODE (byteoff) != INTEGER_CST)
    {
      /* The code below works only for single byte character types.  */
      if (eltsize != 1)
	return NULL_TREE;

      /* If the string has an internal NUL character followed by any
	 non-NUL characters (e.g., "foo\0bar"), we can't compute
	 the offset to the following NUL if we don't know where to
	 start searching for it.  */
      unsigned len = string_length (ptr, eltsize, strelts);

      /* Return when an embedded null character is found or none at all.
	 In the latter case, set the DECL/LEN field in the DATA structure
	 so that callers may examine them.  */
      if (len + 1 < strelts)
	return NULL_TREE;
      else if (len >= maxelts)
	{
	  data->decl = decl;
	  data->off = byteoff;
	  data->minlen = ssize_int (len);
	  return NULL_TREE;
	}

      /* For empty strings the result should be zero.  */
      if (len == 0)
	return ssize_int (0);

      /* We don't know the starting offset, but we do know that the string
	 has no internal zero bytes.  If the offset falls within the bounds
	 of the string subtract the offset from the length of the string,
	 and return that.  Otherwise the length is zero.  Take care to
	 use SAVE_EXPR in case the OFFSET has side-effects.  */
      tree offsave = TREE_SIDE_EFFECTS (byteoff) ? save_expr (byteoff)
					         : byteoff;
      offsave = fold_convert_loc (loc, sizetype, offsave);
      tree condexp = fold_build2_loc (loc, LE_EXPR, boolean_type_node, offsave,
				      size_int (len));
      tree lenexp = fold_build2_loc (loc, MINUS_EXPR, sizetype, size_int (len),
				     offsave);
      lenexp = fold_convert_loc (loc, ssizetype, lenexp);
      return fold_build3_loc (loc, COND_EXPR, ssizetype, condexp, lenexp,
			      build_zero_cst (ssizetype));
    }

  /* Offset from the beginning of the string in elements.  */
  HOST_WIDE_INT eltoff;

  /* We have a known offset into the string.  Start searching there for
     a null character if we can represent it as a single HOST_WIDE_INT.  */
  if (byteoff == 0)
    eltoff = 0;
  else if (! tree_fits_uhwi_p (byteoff) || tree_to_uhwi (byteoff) % eltsize)
    eltoff = -1;
  else
    eltoff = tree_to_uhwi (byteoff) / eltsize;

  /* If the offset is known to be out of bounds, warn, and call strlen at
     runtime.  */
  if (eltoff < 0 || eltoff >= maxelts)
    {
      /* Suppress multiple warnings for propagated constant strings.  */
      if (only_value != 2
	  && !TREE_NO_WARNING (arg)
	  && warning_at (loc, OPT_Warray_bounds,
			 "offset %qwi outside bounds of constant string",
			 eltoff))
	{
	  if (decl)
	    inform (DECL_SOURCE_LOCATION (decl), "%qE declared here", decl);
	  TREE_NO_WARNING (arg) = 1;
	}
      return NULL_TREE;
    }

  /* If eltoff is larger than strelts but less than maxelts the
     string length is zero, since the excess memory will be zero.  */
  if (eltoff > strelts)
    return ssize_int (0);

  /* Use strlen to search for the first zero byte.  Since any strings
     constructed with build_string will have nulls appended, we win even
     if we get handed something like (char[4])"abcd".

     Since ELTOFF is our starting index into the string, no further
     calculation is needed.  */
  unsigned len = string_length (ptr + eltoff * eltsize, eltsize,
				strelts - eltoff);

  /* Don't know what to return if there was no zero termination.
     Ideally this would turn into a gcc_checking_assert over time.
     Set DECL/LEN so callers can examine them.  */
  if (len >= maxelts - eltoff)
    {
      data->decl = decl;
      data->off = byteoff;
      data->minlen = ssize_int (len);
      return NULL_TREE;
    }

  return ssize_int (len);
}
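
/* Two concrete cases of the logic above: for

     const char a[8] = "foo\0bar";

   c_strlen (&a[0], 1) folds to 3 and c_strlen (&a[4], 1) folds to 3
   (the length of the trailing "bar"), while with a nonconstant
   offset OFF, c_strlen (&a[OFF], 1) returns NULL_TREE because the
   embedded NUL makes the result depend on where the search starts.  */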

/* Return a constant integer corresponding to target reading
   GET_MODE_BITSIZE (MODE) bits from string constant STR.  If
   NULL_TERMINATED_P, reading stops after '\0' character, all further ones
   are assumed to be zero, otherwise it reads as many characters
   as needed.  */

rtx
c_readstr (const char *str, scalar_int_mode mode,
	   bool null_terminated_p/*=true*/)
{
  HOST_WIDE_INT ch;
  unsigned int i, j;
  HOST_WIDE_INT tmp[MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT];

  gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
  unsigned int len = (GET_MODE_PRECISION (mode) + HOST_BITS_PER_WIDE_INT - 1)
    / HOST_BITS_PER_WIDE_INT;

  gcc_assert (len <= MAX_BITSIZE_MODE_ANY_INT / HOST_BITS_PER_WIDE_INT);
  for (i = 0; i < len; i++)
    tmp[i] = 0;

  ch = 1;
  for (i = 0; i < GET_MODE_SIZE (mode); i++)
    {
      j = i;
      if (WORDS_BIG_ENDIAN)
	j = GET_MODE_SIZE (mode) - i - 1;
      if (BYTES_BIG_ENDIAN != WORDS_BIG_ENDIAN
	  && GET_MODE_SIZE (mode) >= UNITS_PER_WORD)
	j = j + UNITS_PER_WORD - 2 * (j % UNITS_PER_WORD) - 1;
      j *= BITS_PER_UNIT;

      if (ch || !null_terminated_p)
	ch = (unsigned char) str[i];
      tmp[j / HOST_BITS_PER_WIDE_INT] |= ch << (j % HOST_BITS_PER_WIDE_INT);
    }

  wide_int c = wide_int::from_array (tmp, len, GET_MODE_PRECISION (mode));
  return immed_wide_int_const (c, mode);
}
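
/* For example, on a little-endian target c_readstr ("ab", SImode)
   produces the constant 0x6261: 'a' (0x61) lands in the least
   significant byte, 'b' (0x62) in the next, and the remaining
   bytes are zero once the terminating NUL is seen.  */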

/* Cast a target constant CST to target CHAR and if that value fits into
   host char type, return zero and put that value into variable pointed to by
   P.  */

static int
target_char_cast (tree cst, char *p)
{
  unsigned HOST_WIDE_INT val, hostval;

  if (TREE_CODE (cst) != INTEGER_CST
      || CHAR_TYPE_SIZE > HOST_BITS_PER_WIDE_INT)
    return 1;

  /* Do not care if it fits or not right here.  */
  val = TREE_INT_CST_LOW (cst);

  if (CHAR_TYPE_SIZE < HOST_BITS_PER_WIDE_INT)
    val &= (HOST_WIDE_INT_1U << CHAR_TYPE_SIZE) - 1;

  hostval = val;
  if (HOST_BITS_PER_CHAR < HOST_BITS_PER_WIDE_INT)
    hostval &= (HOST_WIDE_INT_1U << HOST_BITS_PER_CHAR) - 1;

  if (val != hostval)
    return 1;

  *p = hostval;
  return 0;
}
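
/* In the common case of 8-bit target and host chars the cast always
   succeeds: target_char_cast of the constant 'A' stores 0x41 in *P
   and returns zero.  It can fail (return 1) only when CHAR_TYPE_SIZE
   exceeds HOST_BITS_PER_CHAR and the value does not fit, e.g. a
   hypothetical 9-bit target char holding 0x1ff on an 8-bit-char
   host.  */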

/* Similar to save_expr, but assumes that arbitrary code is not executed
   in between the multiple evaluations.  In particular, we assume that a
   non-addressable local variable will not be modified.  */

static tree
builtin_save_expr (tree exp)
{
  if (TREE_CODE (exp) == SSA_NAME
      || (TREE_ADDRESSABLE (exp) == 0
	  && (TREE_CODE (exp) == PARM_DECL
	      || (VAR_P (exp) && !TREE_STATIC (exp)))))
    return exp;

  return save_expr (exp);
}

/* Given TEM, a pointer to a stack frame, follow the dynamic chain COUNT
   times to get the address of either a higher stack frame, or a return
   address located within it (depending on FNDECL_CODE).  */

static rtx
expand_builtin_return_addr (enum built_in_function fndecl_code, int count)
{
  int i;
  rtx tem = INITIAL_FRAME_ADDRESS_RTX;
  if (tem == NULL_RTX)
    {
      /* For a zero count with __builtin_return_address, we don't care what
	 frame address we return, because target-specific definitions will
	 override us.  Therefore frame pointer elimination is OK, and using
	 the soft frame pointer is OK.

	 For a nonzero count, or a zero count with __builtin_frame_address,
	 we require a stable offset from the current frame pointer to the
	 previous one, so we must use the hard frame pointer, and
	 we must disable frame pointer elimination.  */
      if (count == 0 && fndecl_code == BUILT_IN_RETURN_ADDRESS)
	tem = frame_pointer_rtx;
      else
	{
	  tem = hard_frame_pointer_rtx;

	  /* Tell reload not to eliminate the frame pointer.  */
	  crtl->accesses_prior_frames = 1;
	}
    }

  if (count > 0)
    SETUP_FRAME_ADDRESSES ();

  /* On the SPARC, the return address is not in the frame, it is in a
     register.  There is no way to access it off of the current frame
     pointer, but it can be accessed off the previous frame pointer by
     reading the value from the register window save area.  */
  if (RETURN_ADDR_IN_PREVIOUS_FRAME && fndecl_code == BUILT_IN_RETURN_ADDRESS)
    count--;

  /* Scan back COUNT frames to the specified frame.  */
  for (i = 0; i < count; i++)
    {
      /* Assume the dynamic chain pointer is in the word that the
	 frame address points to, unless otherwise specified.  */
      tem = DYNAMIC_CHAIN_ADDRESS (tem);
      tem = memory_address (Pmode, tem);
      tem = gen_frame_mem (Pmode, tem);
      tem = copy_to_reg (tem);
    }

  /* For __builtin_frame_address, return what we've got.  But, on
     the SPARC for example, we may have to add a bias.  */
  if (fndecl_code == BUILT_IN_FRAME_ADDRESS)
    return FRAME_ADDR_RTX (tem);

  /* For __builtin_return_address, get the return address from that frame.  */
#ifdef RETURN_ADDR_RTX
  tem = RETURN_ADDR_RTX (count, tem);
#else
  tem = memory_address (Pmode,
			plus_constant (Pmode, tem, GET_MODE_SIZE (Pmode)));
  tem = gen_frame_mem (Pmode, tem);
#endif
  return tem;
}
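
/* For example, `__builtin_frame_address (2)' reaches this function
   with COUNT == 2: the loop above dereferences the dynamic chain
   twice, and the result is the frame address of the caller's caller
   (possibly biased by FRAME_ADDR_RTX on targets such as SPARC).  */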

/* Alias set used for setjmp buffer.  */
static alias_set_type setjmp_alias_set = -1;

/* Construct the leading half of a __builtin_setjmp call.  Control will
   return to RECEIVER_LABEL.  This is also called directly by the SJLJ
   exception handling code.  */

void
expand_builtin_setjmp_setup (rtx buf_addr, rtx receiver_label)
{
  machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
  rtx stack_save;
  rtx mem;

  if (setjmp_alias_set == -1)
    setjmp_alias_set = new_alias_set ();

  buf_addr = convert_memory_address (Pmode, buf_addr);

  buf_addr = force_reg (Pmode, force_operand (buf_addr, NULL_RTX));

  /* We store the frame pointer and the address of receiver_label in
     the buffer and use the rest of it for the stack save area, which
     is machine-dependent.  */

  mem = gen_rtx_MEM (Pmode, buf_addr);
  set_mem_alias_set (mem, setjmp_alias_set);
  emit_move_insn (mem, hard_frame_pointer_rtx);

  mem = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
					   GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (mem, setjmp_alias_set);

  emit_move_insn (validize_mem (mem),
		  force_reg (Pmode, gen_rtx_LABEL_REF (Pmode, receiver_label)));

  stack_save = gen_rtx_MEM (sa_mode,
			    plus_constant (Pmode, buf_addr,
					   2 * GET_MODE_SIZE (Pmode)));
  set_mem_alias_set (stack_save, setjmp_alias_set);
  emit_stack_save (SAVE_NONLOCAL, &stack_save);

  /* If there is further processing to do, do it.  */
  if (targetm.have_builtin_setjmp_setup ())
    emit_insn (targetm.gen_builtin_setjmp_setup (buf_addr));

  /* We have a nonlocal label.  */
  cfun->has_nonlocal_label = 1;
}

/* Construct the trailing part of a __builtin_setjmp call.  This is
   also called directly by the SJLJ exception handling code.
   If RECEIVER_LABEL is NULL, instead construct a nonlocal goto handler.  */

void
expand_builtin_setjmp_receiver (rtx receiver_label)
{
  rtx chain;

  /* Mark the FP as used when we get here, so we have to make sure it's
     marked as used by this function.  */
  emit_use (hard_frame_pointer_rtx);

  /* Mark the static chain as clobbered here so life information
     doesn't get messed up for it.  */
  chain = rtx_for_static_chain (current_function_decl, true);
  if (chain && REG_P (chain))
    emit_clobber (chain);

  if (!HARD_FRAME_POINTER_IS_ARG_POINTER && fixed_regs[ARG_POINTER_REGNUM])
    {
      /* If the argument pointer can be eliminated in favor of the
	 frame pointer, we don't need to restore it.  We assume here
	 that if such an elimination is present, it can always be used.
	 This is the case on all known machines; if we don't make this
	 assumption, we do unnecessary saving on many machines.  */
      size_t i;
      static const struct elims {const int from, to;} elim_regs[] = ELIMINABLE_REGS;

      for (i = 0; i < ARRAY_SIZE (elim_regs); i++)
	if (elim_regs[i].from == ARG_POINTER_REGNUM
	    && elim_regs[i].to == HARD_FRAME_POINTER_REGNUM)
	  break;

      if (i == ARRAY_SIZE (elim_regs))
	{
	  /* Now restore our arg pointer from the address at which it
	     was saved in our stack frame.  */
	  emit_move_insn (crtl->args.internal_arg_pointer,
			  copy_to_reg (get_arg_pointer_save_area ()));
	}
    }

  if (receiver_label != NULL && targetm.have_builtin_setjmp_receiver ())
    emit_insn (targetm.gen_builtin_setjmp_receiver (receiver_label));
  else if (targetm.have_nonlocal_goto_receiver ())
    emit_insn (targetm.gen_nonlocal_goto_receiver ());
  else
    { /* Nothing */ }

  /* We must not allow the code we just generated to be reordered by
     scheduling.  Specifically, the update of the frame pointer must
     happen immediately, not later.  */
  emit_insn (gen_blockage ());
}
1748 /* __builtin_longjmp is passed a pointer to an array of five words (not
1749 all will be used on all machines). It operates similarly to the C
1750 library function of the same name, but is more efficient. Much of
1751 the code below is copied from the handling of non-local gotos. */
1753 static void
1754 expand_builtin_longjmp (rtx buf_addr, rtx value)
1756 rtx fp, lab, stack;
1757 rtx_insn *insn, *last;
1758 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
1760 /* DRAP is needed for stack realign if longjmp is expanded to current
1761 function */
1762 if (SUPPORTS_STACK_ALIGNMENT)
1763 crtl->need_drap = true;
1765 if (setjmp_alias_set == -1)
1766 setjmp_alias_set = new_alias_set ();
1768 buf_addr = convert_memory_address (Pmode, buf_addr);
1770 buf_addr = force_reg (Pmode, buf_addr);
1772 /* We require the user to pass a second argument of 1, because
1773 that is what builtin_setjmp will return. */
1774 gcc_assert (value == const1_rtx);
1776 last = get_last_insn ();
1777 if (targetm.have_builtin_longjmp ())
1778 emit_insn (targetm.gen_builtin_longjmp (buf_addr));
1779 else
1781 fp = gen_rtx_MEM (Pmode, buf_addr);
1782 lab = gen_rtx_MEM (Pmode, plus_constant (Pmode, buf_addr,
1783 GET_MODE_SIZE (Pmode)));
1785 stack = gen_rtx_MEM (sa_mode, plus_constant (Pmode, buf_addr,
1786 2 * GET_MODE_SIZE (Pmode)));
1787 set_mem_alias_set (fp, setjmp_alias_set);
1788 set_mem_alias_set (lab, setjmp_alias_set);
1789 set_mem_alias_set (stack, setjmp_alias_set);
1791 /* Pick up FP, label, and SP from the block and jump. This code is
1792 from expand_goto in stmt.c; see there for detailed comments. */
1793 if (targetm.have_nonlocal_goto ())
1794 /* We have to pass a value to the nonlocal_goto pattern that will
1795 get copied into the static_chain pointer, but it does not matter
1796 what that value is, because builtin_setjmp does not use it. */
1797 emit_insn (targetm.gen_nonlocal_goto (value, lab, stack, fp));
1798 else
1800 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1801 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1803 lab = copy_to_reg (lab);
1805 /* Restore the frame pointer and stack pointer. We must use a
1806 temporary since the setjmp buffer may be a local. */
1807 fp = copy_to_reg (fp);
1808 emit_stack_restore (SAVE_NONLOCAL, stack);
1810 /* Ensure the frame pointer move is not optimized. */
1811 emit_insn (gen_blockage ());
1812 emit_clobber (hard_frame_pointer_rtx);
1813 emit_clobber (frame_pointer_rtx);
1814 emit_move_insn (hard_frame_pointer_rtx, fp);
1816 emit_use (hard_frame_pointer_rtx);
1817 emit_use (stack_pointer_rtx);
1818 emit_indirect_jump (lab);
1822 /* Search backwards and mark the jump insn as a non-local goto.
1823 Note that this precludes the use of __builtin_longjmp to a
1824 __builtin_setjmp target in the same function. However, we've
1825 already cautioned the user that these functions are for
1826 internal exception handling use only. */
1827 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1829 gcc_assert (insn != last);
1831 if (JUMP_P (insn))
1833 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1834 break;
1836 else if (CALL_P (insn))
1837 break;
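/* Editor's illustration (not part of the original source): the source
   form these builtins expand, mirroring the const1_rtx assertion above
   (the second argument to __builtin_longjmp must be 1):

     void *buf[5];
     if (__builtin_setjmp (buf) == 0)
       __builtin_longjmp (buf, 1);

   As cautioned above, longjmp-ing to a __builtin_setjmp target in the
   same function is not supported; these builtins are for internal
   exception handling use only.  */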
1841 static inline bool
1842 more_const_call_expr_args_p (const const_call_expr_arg_iterator *iter)
1844 return (iter->i < iter->n);
1847 /* This function validates the types of a function call argument list
1848 against a specified list of tree_codes. If the last specifier is a 0,
1849 that represents an ellipsis, otherwise the last specifier must be a
1850 VOID_TYPE. */
1852 static bool
1853 validate_arglist (const_tree callexpr, ...)
1855 enum tree_code code;
1856 bool res = false;
1857 va_list ap;
1858 const_call_expr_arg_iterator iter;
1859 const_tree arg;
1861 va_start (ap, callexpr);
1862 init_const_call_expr_arg_iterator (callexpr, &iter);
1864 /* Get a bitmap of pointer argument numbers declared attribute nonnull. */
1865 tree fn = CALL_EXPR_FN (callexpr);
1866 bitmap argmap = get_nonnull_args (TREE_TYPE (TREE_TYPE (fn)));
1868 for (unsigned argno = 1; ; ++argno)
1870 code = (enum tree_code) va_arg (ap, int);
1872 switch (code)
1874 case 0:
1875 /* This signifies an ellipsis; any further arguments are all ok. */
1876 res = true;
1877 goto end;
1878 case VOID_TYPE:
1879 /* This signifies an endlink: if no arguments remain, return
1880 true; otherwise return false. */
1881 res = !more_const_call_expr_args_p (&iter);
1882 goto end;
1883 case POINTER_TYPE:
1884 /* The actual argument must be nonnull when either the whole
1885 called function has been declared nonnull, or when the formal
1886 argument corresponding to the actual argument has been. */
1887 if (argmap
1888 && (bitmap_empty_p (argmap) || bitmap_bit_p (argmap, argno)))
1890 arg = next_const_call_expr_arg (&iter);
1891 if (!validate_arg (arg, code) || integer_zerop (arg))
1892 goto end;
1893 break;
1895 /* FALLTHRU */
1896 default:
1897 /* If no parameters remain or the parameter's code does not
1898 match the specified code, return false. Otherwise continue
1899 checking any remaining arguments. */
1900 arg = next_const_call_expr_arg (&iter);
1901 if (!validate_arg (arg, code))
1902 goto end;
1903 break;
1907 /* We need gotos here since we can only have one VA_CLOSE in a
1908 function. */
1909 end: ;
1910 va_end (ap);
1912 BITMAP_FREE (argmap);
1914 return res;
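/* Editor's illustration (not part of the original source): specifier
   lists as used by callers later in this file:

     validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE);
     validate_arglist (exp, REAL_TYPE, VOID_TYPE);
     validate_arglist (exp, POINTER_TYPE, 0);

   VOID_TYPE terminates an exact argument list; a trailing 0 stands for
   an ellipsis and accepts any further arguments.  */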
1917 /* Expand a call to __builtin_nonlocal_goto. We're passed the target label
1918 and the address of the save area. */
1920 static rtx
1921 expand_builtin_nonlocal_goto (tree exp)
1923 tree t_label, t_save_area;
1924 rtx r_label, r_save_area, r_fp, r_sp;
1925 rtx_insn *insn;
1927 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
1928 return NULL_RTX;
1930 t_label = CALL_EXPR_ARG (exp, 0);
1931 t_save_area = CALL_EXPR_ARG (exp, 1);
1933 r_label = expand_normal (t_label);
1934 r_label = convert_memory_address (Pmode, r_label);
1935 r_save_area = expand_normal (t_save_area);
1936 r_save_area = convert_memory_address (Pmode, r_save_area);
1937 /* Copy the address of the save location to a register just in case it was
1938 based on the frame pointer. */
1939 r_save_area = copy_to_reg (r_save_area);
1940 r_fp = gen_rtx_MEM (Pmode, r_save_area);
1941 r_sp = gen_rtx_MEM (STACK_SAVEAREA_MODE (SAVE_NONLOCAL),
1942 plus_constant (Pmode, r_save_area,
1943 GET_MODE_SIZE (Pmode)));
1945 crtl->has_nonlocal_goto = 1;
1947 /* ??? We no longer need to pass the static chain value, afaik. */
1948 if (targetm.have_nonlocal_goto ())
1949 emit_insn (targetm.gen_nonlocal_goto (const0_rtx, r_label, r_sp, r_fp));
1950 else
1952 emit_clobber (gen_rtx_MEM (BLKmode, gen_rtx_SCRATCH (VOIDmode)));
1953 emit_clobber (gen_rtx_MEM (BLKmode, hard_frame_pointer_rtx));
1955 r_label = copy_to_reg (r_label);
1957 /* Restore the frame pointer and stack pointer. We must use a
1958 temporary since the setjmp buffer may be a local. */
1959 r_fp = copy_to_reg (r_fp);
1960 emit_stack_restore (SAVE_NONLOCAL, r_sp);
1962 /* Ensure the frame pointer move is not optimized. */
1963 emit_insn (gen_blockage ());
1964 emit_clobber (hard_frame_pointer_rtx);
1965 emit_clobber (frame_pointer_rtx);
1966 emit_move_insn (hard_frame_pointer_rtx, r_fp);
1968 /* USE of hard_frame_pointer_rtx added for consistency;
1969 not clear if really needed. */
1970 emit_use (hard_frame_pointer_rtx);
1971 emit_use (stack_pointer_rtx);
1973 /* If the architecture is using a GP register, we must
1974 conservatively assume that the target function makes use of it.
1975 The prologue of functions with nonlocal gotos must therefore
1976 initialize the GP register to the appropriate value, and we
1977 must then make sure that this value is live at the point
1978 of the jump. (Note that this doesn't necessarily apply
1979 to targets with a nonlocal_goto pattern; they are free
1980 to implement it in their own way. Note also that this is
1981 a no-op if the GP register is a global invariant.) */
1982 unsigned regnum = PIC_OFFSET_TABLE_REGNUM;
1983 if (regnum != INVALID_REGNUM && fixed_regs[regnum])
1984 emit_use (pic_offset_table_rtx);
1986 emit_indirect_jump (r_label);
1989 /* Search backwards to the jump insn and mark it as a
1990 non-local goto. */
1991 for (insn = get_last_insn (); insn; insn = PREV_INSN (insn))
1993 if (JUMP_P (insn))
1995 add_reg_note (insn, REG_NON_LOCAL_GOTO, const0_rtx);
1996 break;
1998 else if (CALL_P (insn))
1999 break;
2002 return const0_rtx;
2005 /* __builtin_update_setjmp_buf is passed a pointer to an array of five words
2006 (not all will be used on all machines) that was passed to __builtin_setjmp.
2007 It updates the stack pointer in that block to the current value. This is
2008 also called directly by the SJLJ exception handling code. */
2010 void
2011 expand_builtin_update_setjmp_buf (rtx buf_addr)
2013 machine_mode sa_mode = STACK_SAVEAREA_MODE (SAVE_NONLOCAL);
2014 buf_addr = convert_memory_address (Pmode, buf_addr);
2015 rtx stack_save
2016 = gen_rtx_MEM (sa_mode,
2017 memory_address
2018 (sa_mode,
2019 plus_constant (Pmode, buf_addr,
2020 2 * GET_MODE_SIZE (Pmode))));
2022 emit_stack_save (SAVE_NONLOCAL, &stack_save);
2025 /* Expand a call to __builtin_prefetch. For a target that does not support
2026 data prefetch, evaluate the memory address argument in case it has side
2027 effects. */
2029 static void
2030 expand_builtin_prefetch (tree exp)
2032 tree arg0, arg1, arg2;
2033 int nargs;
2034 rtx op0, op1, op2;
2036 if (!validate_arglist (exp, POINTER_TYPE, 0))
2037 return;
2039 arg0 = CALL_EXPR_ARG (exp, 0);
2041 /* Arguments 1 and 2 are optional; argument 1 (read/write) defaults to
2042 zero (read) and argument 2 (locality) defaults to 3 (high degree of
2043 locality). */
2044 nargs = call_expr_nargs (exp);
2045 if (nargs > 1)
2046 arg1 = CALL_EXPR_ARG (exp, 1);
2047 else
2048 arg1 = integer_zero_node;
2049 if (nargs > 2)
2050 arg2 = CALL_EXPR_ARG (exp, 2);
2051 else
2052 arg2 = integer_three_node;
2054 /* Argument 0 is an address. */
2055 op0 = expand_expr (arg0, NULL_RTX, Pmode, EXPAND_NORMAL);
2057 /* Argument 1 (read/write flag) must be a compile-time constant int. */
2058 if (TREE_CODE (arg1) != INTEGER_CST)
2060 error ("second argument to %<__builtin_prefetch%> must be a constant");
2061 arg1 = integer_zero_node;
2063 op1 = expand_normal (arg1);
2064 /* Argument 1 must be either zero or one. */
2065 if (INTVAL (op1) != 0 && INTVAL (op1) != 1)
2067 warning (0, "invalid second argument to %<__builtin_prefetch%>;"
2068 " using zero");
2069 op1 = const0_rtx;
2072 /* Argument 2 (locality) must be a compile-time constant int. */
2073 if (TREE_CODE (arg2) != INTEGER_CST)
2075 error ("third argument to %<__builtin_prefetch%> must be a constant");
2076 arg2 = integer_zero_node;
2078 op2 = expand_normal (arg2);
2079 /* Argument 2 must be 0, 1, 2, or 3. */
2080 if (INTVAL (op2) < 0 || INTVAL (op2) > 3)
2082 warning (0, "invalid third argument to %<__builtin_prefetch%>; using zero");
2083 op2 = const0_rtx;
2086 if (targetm.have_prefetch ())
2088 class expand_operand ops[3];
2090 create_address_operand (&ops[0], op0);
2091 create_integer_operand (&ops[1], INTVAL (op1));
2092 create_integer_operand (&ops[2], INTVAL (op2));
2093 if (maybe_expand_insn (targetm.code_for_prefetch, 3, ops))
2094 return;
2097 /* Don't do anything with direct references to volatile memory, but
2098 generate code to handle other side effects. */
2099 if (!MEM_P (op0) && side_effects_p (op0))
2100 emit_insn (op0);
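/* Editor's illustration (not part of the original source): calls this
   function handles, with the defaults documented above:

     __builtin_prefetch (p);          read, locality 3
     __builtin_prefetch (p, 1);       write, locality 3
     __builtin_prefetch (p, 0, 1);    read, low temporal locality

   The second and third arguments must be integer constants in the
   ranges checked above; out-of-range values degrade to zero with a
   warning.  */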
2103 /* Get a MEM rtx for expression EXP which is the address of an operand
2104 to be used in a string instruction (cmpstrsi, cpymemsi, ..). LEN is
2105 the maximum length of the block of memory that might be accessed or
2106 NULL if unknown. */
2108 static rtx
2109 get_memory_rtx (tree exp, tree len)
2111 tree orig_exp = exp;
2112 rtx addr, mem;
2114 /* When EXP is an unresolved SAVE_EXPR, MEM_ATTRS can still be derived
2115 from its expression; for expr->a.b only <variable>.a.b is recorded. */
2116 if (TREE_CODE (exp) == SAVE_EXPR && !SAVE_EXPR_RESOLVED_P (exp))
2117 exp = TREE_OPERAND (exp, 0);
2119 addr = expand_expr (orig_exp, NULL_RTX, ptr_mode, EXPAND_NORMAL);
2120 mem = gen_rtx_MEM (BLKmode, memory_address (BLKmode, addr));
2122 /* Get an expression we can use to find the attributes to assign to MEM.
2123 First remove any nops. */
2124 while (CONVERT_EXPR_P (exp)
2125 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (exp, 0))))
2126 exp = TREE_OPERAND (exp, 0);
2128 /* Build a MEM_REF representing the whole accessed area as a byte blob
2129 (as builtin stringops may alias with anything). */
2130 exp = fold_build2 (MEM_REF,
2131 build_array_type (char_type_node,
2132 build_range_type (sizetype,
2133 size_one_node, len)),
2134 exp, build_int_cst (ptr_type_node, 0));
2136 /* If the MEM_REF has no acceptable address, try to get the base object
2137 from the original address we got, and build an all-aliasing
2138 unknown-sized access to that one. */
2139 if (is_gimple_mem_ref_addr (TREE_OPERAND (exp, 0)))
2140 set_mem_attributes (mem, exp, 0);
2141 else if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
2142 && (exp = get_base_address (TREE_OPERAND (TREE_OPERAND (exp, 0),
2143 0))))
2145 exp = build_fold_addr_expr (exp);
2146 exp = fold_build2 (MEM_REF,
2147 build_array_type (char_type_node,
2148 build_range_type (sizetype,
2149 size_zero_node,
2150 NULL)),
2151 exp, build_int_cst (ptr_type_node, 0));
2152 set_mem_attributes (mem, exp, 0);
2154 set_mem_alias_set (mem, 0);
2155 return mem;
2158 /* Built-in functions to perform an untyped call and return. */
2160 #define apply_args_mode \
2161 (this_target_builtins->x_apply_args_mode)
2162 #define apply_result_mode \
2163 (this_target_builtins->x_apply_result_mode)
2165 /* Return the size required for the block returned by __builtin_apply_args,
2166 and initialize apply_args_mode. */
2168 static int
2169 apply_args_size (void)
2171 static int size = -1;
2172 int align;
2173 unsigned int regno;
2175 /* The values computed by this function never change. */
2176 if (size < 0)
2178 /* The first value is the incoming arg-pointer. */
2179 size = GET_MODE_SIZE (Pmode);
2181 /* The second value is the structure value address unless this is
2182 passed as an "invisible" first argument. */
2183 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2184 size += GET_MODE_SIZE (Pmode);
2186 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2187 if (FUNCTION_ARG_REGNO_P (regno))
2189 fixed_size_mode mode = targetm.calls.get_raw_arg_mode (regno);
2191 gcc_assert (mode != VOIDmode);
2193 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2194 if (size % align != 0)
2195 size = CEIL (size, align) * align;
2196 size += GET_MODE_SIZE (mode);
2197 apply_args_mode[regno] = mode;
2199 else
2201 apply_args_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2204 return size;
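/* Editor's sketch (not part of the original source): the block sized
   here is laid out exactly as the loop above walks it --

     offset 0                      incoming arg pointer (Pmode)
     offset GET_MODE_SIZE (Pmode)  structure value address, present only
                                   when targetm.calls.struct_value_rtx
                                   returns nonnull
     ...                           one mode-aligned slot per register
                                   with FUNCTION_ARG_REGNO_P set  */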
2207 /* Return the size required for the block returned by __builtin_apply,
2208 and initialize apply_result_mode. */
2210 static int
2211 apply_result_size (void)
2213 static int size = -1;
2214 int align, regno;
2216 /* The values computed by this function never change. */
2217 if (size < 0)
2219 size = 0;
2221 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2222 if (targetm.calls.function_value_regno_p (regno))
2224 fixed_size_mode mode = targetm.calls.get_raw_result_mode (regno);
2226 gcc_assert (mode != VOIDmode);
2228 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2229 if (size % align != 0)
2230 size = CEIL (size, align) * align;
2231 size += GET_MODE_SIZE (mode);
2232 apply_result_mode[regno] = mode;
2234 else
2235 apply_result_mode[regno] = as_a <fixed_size_mode> (VOIDmode);
2237 /* Allow targets that use untyped_call and untyped_return to override
2238 the size so that machine-specific information can be stored here. */
2239 #ifdef APPLY_RESULT_SIZE
2240 size = APPLY_RESULT_SIZE;
2241 #endif
2243 return size;
2246 /* Create a vector describing the result block RESULT. If SAVEP is true,
2247 the result block is used to save the values; otherwise it is used to
2248 restore the values. */
2250 static rtx
2251 result_vector (int savep, rtx result)
2253 int regno, size, align, nelts;
2254 fixed_size_mode mode;
2255 rtx reg, mem;
2256 rtx *savevec = XALLOCAVEC (rtx, FIRST_PSEUDO_REGISTER);
2258 size = nelts = 0;
2259 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2260 if ((mode = apply_result_mode[regno]) != VOIDmode)
2262 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2263 if (size % align != 0)
2264 size = CEIL (size, align) * align;
2265 reg = gen_rtx_REG (mode, savep ? regno : INCOMING_REGNO (regno));
2266 mem = adjust_address (result, mode, size);
2267 savevec[nelts++] = (savep
2268 ? gen_rtx_SET (mem, reg)
2269 : gen_rtx_SET (reg, mem));
2270 size += GET_MODE_SIZE (mode);
2272 return gen_rtx_PARALLEL (VOIDmode, gen_rtvec_v (nelts, savevec));
2275 /* Save the state required to perform an untyped call with the same
2276 arguments as were passed to the current function. */
2278 static rtx
2279 expand_builtin_apply_args_1 (void)
2281 rtx registers, tem;
2282 int size, align, regno;
2283 fixed_size_mode mode;
2284 rtx struct_incoming_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 1);
2286 /* Create a block where the arg-pointer, structure value address,
2287 and argument registers can be saved. */
2288 registers = assign_stack_local (BLKmode, apply_args_size (), -1);
2290 /* Walk past the arg-pointer and structure value address. */
2291 size = GET_MODE_SIZE (Pmode);
2292 if (targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0))
2293 size += GET_MODE_SIZE (Pmode);
2295 /* Save each register used in calling a function to the block. */
2296 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2297 if ((mode = apply_args_mode[regno]) != VOIDmode)
2299 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2300 if (size % align != 0)
2301 size = CEIL (size, align) * align;
2303 tem = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2305 emit_move_insn (adjust_address (registers, mode, size), tem);
2306 size += GET_MODE_SIZE (mode);
2309 /* Save the arg pointer to the block. */
2310 tem = copy_to_reg (crtl->args.internal_arg_pointer);
2311 /* We need the pointer as the caller actually passed the arguments, not
2312 as we might have pretended they were passed. Make sure it's a valid
2313 operand, as emit_move_insn isn't expected to handle a PLUS. */
2314 if (STACK_GROWS_DOWNWARD)
2315 tem
2316 = force_operand (plus_constant (Pmode, tem,
2317 crtl->args.pretend_args_size),
2318 NULL_RTX);
2319 emit_move_insn (adjust_address (registers, Pmode, 0), tem);
2321 size = GET_MODE_SIZE (Pmode);
2323 /* Save the structure value address unless this is passed as an
2324 "invisible" first argument. */
2325 if (struct_incoming_value)
2326 emit_move_insn (adjust_address (registers, Pmode, size),
2327 copy_to_reg (struct_incoming_value));
2329 /* Return the address of the block. */
2330 return copy_addr_to_reg (XEXP (registers, 0));
2333 /* __builtin_apply_args returns block of memory allocated on
2334 the stack into which is stored the arg pointer, structure
2335 value address, static chain, and all the registers that might
2336 possibly be used in performing a function call. The code is
2337 moved to the start of the function so the incoming values are
2338 saved. */
2340 static rtx
2341 expand_builtin_apply_args (void)
2343 /* Don't do __builtin_apply_args more than once in a function.
2344 Save the result of the first call and reuse it. */
2345 if (apply_args_value != 0)
2346 return apply_args_value;
2348 /* When this function is called, it means that registers must be
2349 saved on entry to this function. So we migrate the
2350 call to the first insn of this function. */
2351 rtx temp;
2353 start_sequence ();
2354 temp = expand_builtin_apply_args_1 ();
2355 rtx_insn *seq = get_insns ();
2356 end_sequence ();
2358 apply_args_value = temp;
2360 /* Put the insns after the NOTE that starts the function.
2361 If this is inside a start_sequence, make the outer-level insn
2362 chain current, so the code is placed at the start of the
2363 function. If internal_arg_pointer is a non-virtual pseudo,
2364 it needs to be placed after the function that initializes
2365 that pseudo. */
2366 push_topmost_sequence ();
2367 if (REG_P (crtl->args.internal_arg_pointer)
2368 && REGNO (crtl->args.internal_arg_pointer) > LAST_VIRTUAL_REGISTER)
2369 emit_insn_before (seq, parm_birth_insn);
2370 else
2371 emit_insn_before (seq, NEXT_INSN (entry_of_function ()));
2372 pop_topmost_sequence ();
2373 return temp;
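/* Editor's illustration (not part of the original source): at the
   source level the expansion above serves

     void *args = __builtin_apply_args ();

   and, as coded above, the saving sequence is emitted once at function
   entry and its result is reused by any later occurrence.  */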
2377 /* Perform an untyped call and save the state required to perform an
2378 untyped return of whatever value was returned by the given function. */
2380 static rtx
2381 expand_builtin_apply (rtx function, rtx arguments, rtx argsize)
2383 int size, align, regno;
2384 fixed_size_mode mode;
2385 rtx incoming_args, result, reg, dest, src;
2386 rtx_call_insn *call_insn;
2387 rtx old_stack_level = 0;
2388 rtx call_fusage = 0;
2389 rtx struct_value = targetm.calls.struct_value_rtx (cfun ? TREE_TYPE (cfun->decl) : 0, 0);
2391 arguments = convert_memory_address (Pmode, arguments);
2393 /* Create a block where the return registers can be saved. */
2394 result = assign_stack_local (BLKmode, apply_result_size (), -1);
2396 /* Fetch the arg pointer from the ARGUMENTS block. */
2397 incoming_args = gen_reg_rtx (Pmode);
2398 emit_move_insn (incoming_args, gen_rtx_MEM (Pmode, arguments));
2399 if (!STACK_GROWS_DOWNWARD)
2400 incoming_args = expand_simple_binop (Pmode, MINUS, incoming_args, argsize,
2401 incoming_args, 0, OPTAB_LIB_WIDEN);
2403 /* Push a new argument block and copy the arguments. Do not allow
2404 the (potential) memcpy call below to interfere with our stack
2405 manipulations. */
2406 do_pending_stack_adjust ();
2407 NO_DEFER_POP;
2409 /* Save the stack with nonlocal if available. */
2410 if (targetm.have_save_stack_nonlocal ())
2411 emit_stack_save (SAVE_NONLOCAL, &old_stack_level);
2412 else
2413 emit_stack_save (SAVE_BLOCK, &old_stack_level);
2415 /* Allocate a block of memory onto the stack and copy the memory
2416 arguments to the outgoing arguments address. We can pass TRUE
2417 as the 4th argument because we just saved the stack pointer
2418 and will restore it right after the call. */
2419 allocate_dynamic_stack_space (argsize, 0, BIGGEST_ALIGNMENT, -1, true);
2421 /* Set DRAP flag to true, even though allocate_dynamic_stack_space
2422 may have already set current_function_calls_alloca to true.
2423 current_function_calls_alloca won't be set if argsize is zero,
2424 so we have to guarantee need_drap is true here. */
2425 if (SUPPORTS_STACK_ALIGNMENT)
2426 crtl->need_drap = true;
2428 dest = virtual_outgoing_args_rtx;
2429 if (!STACK_GROWS_DOWNWARD)
2431 if (CONST_INT_P (argsize))
2432 dest = plus_constant (Pmode, dest, -INTVAL (argsize));
2433 else
2434 dest = gen_rtx_PLUS (Pmode, dest, negate_rtx (Pmode, argsize));
2436 dest = gen_rtx_MEM (BLKmode, dest);
2437 set_mem_align (dest, PARM_BOUNDARY);
2438 src = gen_rtx_MEM (BLKmode, incoming_args);
2439 set_mem_align (src, PARM_BOUNDARY);
2440 emit_block_move (dest, src, argsize, BLOCK_OP_NORMAL);
2442 /* Refer to the argument block. */
2443 apply_args_size ();
2444 arguments = gen_rtx_MEM (BLKmode, arguments);
2445 set_mem_align (arguments, PARM_BOUNDARY);
2447 /* Walk past the arg-pointer and structure value address. */
2448 size = GET_MODE_SIZE (Pmode);
2449 if (struct_value)
2450 size += GET_MODE_SIZE (Pmode);
2452 /* Restore each of the registers previously saved. Make USE insns
2453 for each of these registers for use in making the call. */
2454 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2455 if ((mode = apply_args_mode[regno]) != VOIDmode)
2457 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2458 if (size % align != 0)
2459 size = CEIL (size, align) * align;
2460 reg = gen_rtx_REG (mode, regno);
2461 emit_move_insn (reg, adjust_address (arguments, mode, size));
2462 use_reg (&call_fusage, reg);
2463 size += GET_MODE_SIZE (mode);
2466 /* Restore the structure value address unless this is passed as an
2467 "invisible" first argument. */
2468 size = GET_MODE_SIZE (Pmode);
2469 if (struct_value)
2471 rtx value = gen_reg_rtx (Pmode);
2472 emit_move_insn (value, adjust_address (arguments, Pmode, size));
2473 emit_move_insn (struct_value, value);
2474 if (REG_P (struct_value))
2475 use_reg (&call_fusage, struct_value);
2478 /* All arguments and registers used for the call are set up by now! */
2479 function = prepare_call_address (NULL, function, NULL, &call_fusage, 0, 0);
2481 /* Ensure address is valid. SYMBOL_REF is already valid, so no need,
2482 and we don't want to load it into a register as an optimization,
2483 because prepare_call_address already did it if it should be done. */
2484 if (GET_CODE (function) != SYMBOL_REF)
2485 function = memory_address (FUNCTION_MODE, function);
2487 /* Generate the actual call instruction and save the return value. */
2488 if (targetm.have_untyped_call ())
2490 rtx mem = gen_rtx_MEM (FUNCTION_MODE, function);
2491 rtx_insn *seq = targetm.gen_untyped_call (mem, result,
2492 result_vector (1, result));
2493 for (rtx_insn *insn = seq; insn; insn = NEXT_INSN (insn))
2494 if (CALL_P (insn))
2495 add_reg_note (insn, REG_UNTYPED_CALL, NULL_RTX);
2496 emit_insn (seq);
2498 else if (targetm.have_call_value ())
2500 rtx valreg = 0;
2502 /* Locate the unique return register. It is not possible to
2503 express a call that sets more than one return register using
2504 call_value; use untyped_call for that. In fact, untyped_call
2505 only needs to save the return registers in the given block. */
2506 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2507 if ((mode = apply_result_mode[regno]) != VOIDmode)
2509 gcc_assert (!valreg); /* have_untyped_call required. */
2511 valreg = gen_rtx_REG (mode, regno);
2514 emit_insn (targetm.gen_call_value (valreg,
2515 gen_rtx_MEM (FUNCTION_MODE, function),
2516 const0_rtx, NULL_RTX, const0_rtx));
2518 emit_move_insn (adjust_address (result, GET_MODE (valreg), 0), valreg);
2520 else
2521 gcc_unreachable ();
2523 /* Find the CALL insn we just emitted, and attach the register usage
2524 information. */
2525 call_insn = last_call_insn ();
2526 add_function_usage_to (call_insn, call_fusage);
2528 /* Restore the stack. */
2529 if (targetm.have_save_stack_nonlocal ())
2530 emit_stack_restore (SAVE_NONLOCAL, old_stack_level);
2531 else
2532 emit_stack_restore (SAVE_BLOCK, old_stack_level);
2533 fixup_args_size_notes (call_insn, get_last_insn (), 0);
2535 OK_DEFER_POP;
2537 /* Return the address of the result block. */
2538 result = copy_addr_to_reg (XEXP (result, 0));
2539 return convert_memory_address (ptr_mode, result);
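/* Editor's illustration (not part of the original source): a call such
   as

     void *res = __builtin_apply ((void (*) ()) fn, args, 64);

   reaches this function with FUNCTION = fn, ARGUMENTS = args and
   ARGSIZE = 64; fn, args and the byte count 64 are placeholders.  */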
2542 /* Perform an untyped return. */
2544 static void
2545 expand_builtin_return (rtx result)
2547 int size, align, regno;
2548 fixed_size_mode mode;
2549 rtx reg;
2550 rtx_insn *call_fusage = 0;
2552 result = convert_memory_address (Pmode, result);
2554 apply_result_size ();
2555 result = gen_rtx_MEM (BLKmode, result);
2557 if (targetm.have_untyped_return ())
2559 rtx vector = result_vector (0, result);
2560 emit_jump_insn (targetm.gen_untyped_return (result, vector));
2561 emit_barrier ();
2562 return;
2565 /* Restore the return value and note that each value is used. */
2566 size = 0;
2567 for (regno = 0; regno < FIRST_PSEUDO_REGISTER; regno++)
2568 if ((mode = apply_result_mode[regno]) != VOIDmode)
2570 align = GET_MODE_ALIGNMENT (mode) / BITS_PER_UNIT;
2571 if (size % align != 0)
2572 size = CEIL (size, align) * align;
2573 reg = gen_rtx_REG (mode, INCOMING_REGNO (regno));
2574 emit_move_insn (reg, adjust_address (result, mode, size));
2576 push_to_sequence (call_fusage);
2577 emit_use (reg);
2578 call_fusage = get_insns ();
2579 end_sequence ();
2580 size += GET_MODE_SIZE (mode);
2583 /* Put the USE insns before the return. */
2584 emit_insn (call_fusage);
2586 /* Return whatever value was restored by jumping directly to the end
2587 of the function. */
2588 expand_naked_return ();
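/* Editor's illustration (not part of the original source): the three
   untyped-call builtins expanded above combine into the classic
   forwarding idiom

     void *args = __builtin_apply_args ();
     void *res  = __builtin_apply ((void (*) ()) target_fn, args, SIZE);
     __builtin_return (res);

   where target_fn and SIZE are hypothetical placeholders supplied by
   the user.  */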
2591 /* Used by expand_builtin_classify_type and fold_builtin_classify_type. */
2593 static enum type_class
2594 type_to_class (tree type)
2596 switch (TREE_CODE (type))
2598 case VOID_TYPE: return void_type_class;
2599 case INTEGER_TYPE: return integer_type_class;
2600 case ENUMERAL_TYPE: return enumeral_type_class;
2601 case BOOLEAN_TYPE: return boolean_type_class;
2602 case POINTER_TYPE: return pointer_type_class;
2603 case REFERENCE_TYPE: return reference_type_class;
2604 case OFFSET_TYPE: return offset_type_class;
2605 case REAL_TYPE: return real_type_class;
2606 case COMPLEX_TYPE: return complex_type_class;
2607 case FUNCTION_TYPE: return function_type_class;
2608 case METHOD_TYPE: return method_type_class;
2609 case RECORD_TYPE: return record_type_class;
2610 case UNION_TYPE:
2611 case QUAL_UNION_TYPE: return union_type_class;
2612 case ARRAY_TYPE: return (TYPE_STRING_FLAG (type)
2613 ? string_type_class : array_type_class);
2614 case LANG_TYPE: return lang_type_class;
2615 case OPAQUE_TYPE: return opaque_type_class;
2616 default: return no_type_class;
2620 /* Expand a call EXP to __builtin_classify_type. */
2622 static rtx
2623 expand_builtin_classify_type (tree exp)
2625 if (call_expr_nargs (exp))
2626 return GEN_INT (type_to_class (TREE_TYPE (CALL_EXPR_ARG (exp, 0))));
2627 return GEN_INT (no_type_class);
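/* Editor's illustration (not part of the original source): with the
   mapping in type_to_class,

     __builtin_classify_type (42)     yields integer_type_class
     __builtin_classify_type (3.14)   yields real_type_class

   and a call without arguments yields no_type_class.  */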
2630 /* This helper macro, meant to be used in mathfn_built_in below, determines
2631 which among a set of builtin math functions is appropriate for a given type
2632 mode. The `F' (float) and `L' (long double) are automatically generated
2633 from the 'double' case. If a function supports the _Float<N> and _Float<N>X
2634 types, there are additional types that are considered with 'F32', 'F64',
2635 'F128', etc. suffixes. */
2636 #define CASE_MATHFN(MATHFN) \
2637 CASE_CFN_##MATHFN: \
2638 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2639 fcodel = BUILT_IN_##MATHFN##L ; break;
2640 /* Similar to the above, but also add support for the _Float<N> and _Float<N>X
2641 types. */
2642 #define CASE_MATHFN_FLOATN(MATHFN) \
2643 CASE_CFN_##MATHFN: \
2644 fcode = BUILT_IN_##MATHFN; fcodef = BUILT_IN_##MATHFN##F ; \
2645 fcodel = BUILT_IN_##MATHFN##L ; fcodef16 = BUILT_IN_##MATHFN##F16 ; \
2646 fcodef32 = BUILT_IN_##MATHFN##F32; fcodef64 = BUILT_IN_##MATHFN##F64 ; \
2647 fcodef128 = BUILT_IN_##MATHFN##F128 ; fcodef32x = BUILT_IN_##MATHFN##F32X ; \
2648 fcodef64x = BUILT_IN_##MATHFN##F64X ; fcodef128x = BUILT_IN_##MATHFN##F128X ;\
2649 break;
2650 /* Similar to above, but appends _R after any F/L suffix. */
2651 #define CASE_MATHFN_REENT(MATHFN) \
2652 case CFN_BUILT_IN_##MATHFN##_R: \
2653 case CFN_BUILT_IN_##MATHFN##F_R: \
2654 case CFN_BUILT_IN_##MATHFN##L_R: \
2655 fcode = BUILT_IN_##MATHFN##_R; fcodef = BUILT_IN_##MATHFN##F_R ; \
2656 fcodel = BUILT_IN_##MATHFN##L_R ; break;
2658 /* Return a function equivalent to FN but operating on floating-point
2659 values of type TYPE, or END_BUILTINS if no such function exists.
2660 This is purely an operation on function codes; it does not guarantee
2661 that the target actually has an implementation of the function. */
2663 static built_in_function
2664 mathfn_built_in_2 (tree type, combined_fn fn)
2666 tree mtype;
2667 built_in_function fcode, fcodef, fcodel;
2668 built_in_function fcodef16 = END_BUILTINS;
2669 built_in_function fcodef32 = END_BUILTINS;
2670 built_in_function fcodef64 = END_BUILTINS;
2671 built_in_function fcodef128 = END_BUILTINS;
2672 built_in_function fcodef32x = END_BUILTINS;
2673 built_in_function fcodef64x = END_BUILTINS;
2674 built_in_function fcodef128x = END_BUILTINS;
2676 switch (fn)
2678 #define SEQ_OF_CASE_MATHFN \
2679 CASE_MATHFN (ACOS) \
2680 CASE_MATHFN (ACOSH) \
2681 CASE_MATHFN (ASIN) \
2682 CASE_MATHFN (ASINH) \
2683 CASE_MATHFN (ATAN) \
2684 CASE_MATHFN (ATAN2) \
2685 CASE_MATHFN (ATANH) \
2686 CASE_MATHFN (CBRT) \
2687 CASE_MATHFN_FLOATN (CEIL) \
2688 CASE_MATHFN (CEXPI) \
2689 CASE_MATHFN_FLOATN (COPYSIGN) \
2690 CASE_MATHFN (COS) \
2691 CASE_MATHFN (COSH) \
2692 CASE_MATHFN (DREM) \
2693 CASE_MATHFN (ERF) \
2694 CASE_MATHFN (ERFC) \
2695 CASE_MATHFN (EXP) \
2696 CASE_MATHFN (EXP10) \
2697 CASE_MATHFN (EXP2) \
2698 CASE_MATHFN (EXPM1) \
2699 CASE_MATHFN (FABS) \
2700 CASE_MATHFN (FDIM) \
2701 CASE_MATHFN_FLOATN (FLOOR) \
2702 CASE_MATHFN_FLOATN (FMA) \
2703 CASE_MATHFN_FLOATN (FMAX) \
2704 CASE_MATHFN_FLOATN (FMIN) \
2705 CASE_MATHFN (FMOD) \
2706 CASE_MATHFN (FREXP) \
2707 CASE_MATHFN (GAMMA) \
2708 CASE_MATHFN_REENT (GAMMA) /* GAMMA_R */ \
2709 CASE_MATHFN (HUGE_VAL) \
2710 CASE_MATHFN (HYPOT) \
2711 CASE_MATHFN (ILOGB) \
2712 CASE_MATHFN (ICEIL) \
2713 CASE_MATHFN (IFLOOR) \
2714 CASE_MATHFN (INF) \
2715 CASE_MATHFN (IRINT) \
2716 CASE_MATHFN (IROUND) \
2717 CASE_MATHFN (ISINF) \
2718 CASE_MATHFN (J0) \
2719 CASE_MATHFN (J1) \
2720 CASE_MATHFN (JN) \
2721 CASE_MATHFN (LCEIL) \
2722 CASE_MATHFN (LDEXP) \
2723 CASE_MATHFN (LFLOOR) \
2724 CASE_MATHFN (LGAMMA) \
2725 CASE_MATHFN_REENT (LGAMMA) /* LGAMMA_R */ \
2726 CASE_MATHFN (LLCEIL) \
2727 CASE_MATHFN (LLFLOOR) \
2728 CASE_MATHFN (LLRINT) \
2729 CASE_MATHFN (LLROUND) \
2730 CASE_MATHFN (LOG) \
2731 CASE_MATHFN (LOG10) \
2732 CASE_MATHFN (LOG1P) \
2733 CASE_MATHFN (LOG2) \
2734 CASE_MATHFN (LOGB) \
2735 CASE_MATHFN (LRINT) \
2736 CASE_MATHFN (LROUND) \
2737 CASE_MATHFN (MODF) \
2738 CASE_MATHFN (NAN) \
2739 CASE_MATHFN (NANS) \
2740 CASE_MATHFN_FLOATN (NEARBYINT) \
2741 CASE_MATHFN (NEXTAFTER) \
2742 CASE_MATHFN (NEXTTOWARD) \
2743 CASE_MATHFN (POW) \
2744 CASE_MATHFN (POWI) \
2745 CASE_MATHFN (POW10) \
2746 CASE_MATHFN (REMAINDER) \
2747 CASE_MATHFN (REMQUO) \
2748 CASE_MATHFN_FLOATN (RINT) \
2749 CASE_MATHFN_FLOATN (ROUND) \
2750 CASE_MATHFN_FLOATN (ROUNDEVEN) \
2751 CASE_MATHFN (SCALB) \
2752 CASE_MATHFN (SCALBLN) \
2753 CASE_MATHFN (SCALBN) \
2754 CASE_MATHFN (SIGNBIT) \
2755 CASE_MATHFN (SIGNIFICAND) \
2756 CASE_MATHFN (SIN) \
2757 CASE_MATHFN (SINCOS) \
2758 CASE_MATHFN (SINH) \
2759 CASE_MATHFN_FLOATN (SQRT) \
2760 CASE_MATHFN (TAN) \
2761 CASE_MATHFN (TANH) \
2762 CASE_MATHFN (TGAMMA) \
2763 CASE_MATHFN_FLOATN (TRUNC) \
2764 CASE_MATHFN (Y0) \
2765 CASE_MATHFN (Y1) \
2766 CASE_MATHFN (YN)
2768 SEQ_OF_CASE_MATHFN
2770 default:
2771 return END_BUILTINS;
2774 mtype = TYPE_MAIN_VARIANT (type);
2775 if (mtype == double_type_node)
2776 return fcode;
2777 else if (mtype == float_type_node)
2778 return fcodef;
2779 else if (mtype == long_double_type_node)
2780 return fcodel;
2781 else if (mtype == float16_type_node)
2782 return fcodef16;
2783 else if (mtype == float32_type_node)
2784 return fcodef32;
2785 else if (mtype == float64_type_node)
2786 return fcodef64;
2787 else if (mtype == float128_type_node)
2788 return fcodef128;
2789 else if (mtype == float32x_type_node)
2790 return fcodef32x;
2791 else if (mtype == float64x_type_node)
2792 return fcodef64x;
2793 else if (mtype == float128x_type_node)
2794 return fcodef128x;
2795 else
2796 return END_BUILTINS;
2799 #undef CASE_MATHFN
2800 #undef CASE_MATHFN_FLOATN
2801 #undef CASE_MATHFN_REENT
2803 /* Return the mathematical function equivalent to FN but operating directly on TYPE,
2804 if available. If IMPLICIT_P is true use the implicit builtin declaration,
2805 otherwise use the explicit declaration. If we can't do the conversion,
2806 return null. */
2808 static tree
2809 mathfn_built_in_1 (tree type, combined_fn fn, bool implicit_p)
2811 built_in_function fcode2 = mathfn_built_in_2 (type, fn);
2812 if (fcode2 == END_BUILTINS)
2813 return NULL_TREE;
2815 if (implicit_p && !builtin_decl_implicit_p (fcode2))
2816 return NULL_TREE;
2818 return builtin_decl_explicit (fcode2);
2821 /* Like mathfn_built_in_1, but always use the implicit array. */
2823 tree
2824 mathfn_built_in (tree type, combined_fn fn)
2826 return mathfn_built_in_1 (type, fn, /*implicit=*/ 1);
2829 /* Like mathfn_built_in_1, but take a built_in_function and
2830 always use the implicit array. */
2832 tree
2833 mathfn_built_in (tree type, enum built_in_function fn)
2835 return mathfn_built_in_1 (type, as_combined_fn (fn), /*implicit=*/ 1);
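/* Editor's illustration (not part of the original source): asking for
   the float variant of a double builtin goes through the table above,

     tree fn = mathfn_built_in (float_type_node, BUILT_IN_SQRT);

   leaving FN as the implicit declaration of BUILT_IN_SQRTF, or
   NULL_TREE when sqrtf is not implicitly available.  */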
2838 /* Return the type associated with a built in function, i.e., the one
2839 to be passed to mathfn_built_in to get the type-specific
2840 function. */
2842 tree
2843 mathfn_built_in_type (combined_fn fn)
2845 #define CASE_MATHFN(MATHFN) \
2846 case CFN_BUILT_IN_##MATHFN: \
2847 return double_type_node; \
2848 case CFN_BUILT_IN_##MATHFN##F: \
2849 return float_type_node; \
2850 case CFN_BUILT_IN_##MATHFN##L: \
2851 return long_double_type_node;
2853 #define CASE_MATHFN_FLOATN(MATHFN) \
2854 CASE_MATHFN(MATHFN) \
2855 case CFN_BUILT_IN_##MATHFN##F16: \
2856 return float16_type_node; \
2857 case CFN_BUILT_IN_##MATHFN##F32: \
2858 return float32_type_node; \
2859 case CFN_BUILT_IN_##MATHFN##F64: \
2860 return float64_type_node; \
2861 case CFN_BUILT_IN_##MATHFN##F128: \
2862 return float128_type_node; \
2863 case CFN_BUILT_IN_##MATHFN##F32X: \
2864 return float32x_type_node; \
2865 case CFN_BUILT_IN_##MATHFN##F64X: \
2866 return float64x_type_node; \
2867 case CFN_BUILT_IN_##MATHFN##F128X: \
2868 return float128x_type_node;
2870 /* Similar to above, but appends _R after any F/L suffix. */
2871 #define CASE_MATHFN_REENT(MATHFN) \
2872 case CFN_BUILT_IN_##MATHFN##_R: \
2873 return double_type_node; \
2874 case CFN_BUILT_IN_##MATHFN##F_R: \
2875 return float_type_node; \
2876 case CFN_BUILT_IN_##MATHFN##L_R: \
2877 return long_double_type_node;
2879 switch (fn)
2881 SEQ_OF_CASE_MATHFN
2883 default:
2884 return NULL_TREE;
2887 #undef CASE_MATHFN
2888 #undef CASE_MATHFN_FLOATN
2889 #undef CASE_MATHFN_REENT
2890 #undef SEQ_OF_CASE_MATHFN
2893 /* If BUILT_IN_NORMAL function FNDECL has an associated internal function,
2894 return its code, otherwise return IFN_LAST. Note that this function
2895 only tests whether the function is defined in internals.def, not whether
2896 it is actually available on the target. */
2898 internal_fn
2899 associated_internal_fn (tree fndecl)
2901 gcc_checking_assert (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_NORMAL);
2902 tree return_type = TREE_TYPE (TREE_TYPE (fndecl));
2903 switch (DECL_FUNCTION_CODE (fndecl))
2905 #define DEF_INTERNAL_FLT_FN(NAME, FLAGS, OPTAB, TYPE) \
2906 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2907 #define DEF_INTERNAL_FLT_FLOATN_FN(NAME, FLAGS, OPTAB, TYPE) \
2908 CASE_FLT_FN (BUILT_IN_##NAME): return IFN_##NAME; \
2909 CASE_FLT_FN_FLOATN_NX (BUILT_IN_##NAME): return IFN_##NAME;
2910 #define DEF_INTERNAL_INT_FN(NAME, FLAGS, OPTAB, TYPE) \
2911 CASE_INT_FN (BUILT_IN_##NAME): return IFN_##NAME;
2912 #include "internal-fn.def"
2914 CASE_FLT_FN (BUILT_IN_POW10):
2915 return IFN_EXP10;
2917 CASE_FLT_FN (BUILT_IN_DREM):
2918 return IFN_REMAINDER;
2920 CASE_FLT_FN (BUILT_IN_SCALBN):
2921 CASE_FLT_FN (BUILT_IN_SCALBLN):
2922 if (REAL_MODE_FORMAT (TYPE_MODE (return_type))->b == 2)
2923 return IFN_LDEXP;
2924 return IFN_LAST;
2926 default:
2927 return IFN_LAST;
2931 /* If CALL is a call to a BUILT_IN_NORMAL function that could be replaced
2932 on the current target by a call to an internal function, return the
2933 code of that internal function, otherwise return IFN_LAST. The caller
2934 is responsible for ensuring that any side-effects of the built-in
2935 call are dealt with correctly. E.g. if CALL sets errno, the caller
2936 must decide that the errno result isn't needed or make it available
2937 in some other way. */
2939 internal_fn
2940 replacement_internal_fn (gcall *call)
2942 if (gimple_call_builtin_p (call, BUILT_IN_NORMAL))
2944 internal_fn ifn = associated_internal_fn (gimple_call_fndecl (call));
2945 if (ifn != IFN_LAST)
2947 tree_pair types = direct_internal_fn_types (ifn, call);
2948 optimization_type opt_type = bb_optimization_type (gimple_bb (call));
2949 if (direct_internal_fn_supported_p (ifn, types, opt_type))
2950 return ifn;
2953 return IFN_LAST;
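/* Editor's illustration (not part of the original source): per the
   special cases above,

     associated_internal_fn (decl_for (BUILT_IN_POW10))  => IFN_EXP10
     associated_internal_fn (decl_for (BUILT_IN_DREM))   => IFN_REMAINDER

   where decl_for is shorthand for the builtin's fndecl.
   replacement_internal_fn additionally requires
   direct_internal_fn_supported_p before committing to the internal
   function.  */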
2956 /* Expand a call to the builtin ternary math functions (fma).
2957 Return NULL_RTX if a normal call should be emitted rather than expanding the
2958 function in-line. EXP is the expression that is a call to the builtin
2959 function; if convenient, the result should be placed in TARGET.
2960 SUBTARGET may be used as the target for computing one of EXP's
2961 operands. */
2963 static rtx
2964 expand_builtin_mathfn_ternary (tree exp, rtx target, rtx subtarget)
2966 optab builtin_optab;
2967 rtx op0, op1, op2, result;
2968 rtx_insn *insns;
2969 tree fndecl = get_callee_fndecl (exp);
2970 tree arg0, arg1, arg2;
2971 machine_mode mode;
2973 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, REAL_TYPE, VOID_TYPE))
2974 return NULL_RTX;
2976 arg0 = CALL_EXPR_ARG (exp, 0);
2977 arg1 = CALL_EXPR_ARG (exp, 1);
2978 arg2 = CALL_EXPR_ARG (exp, 2);
2980 switch (DECL_FUNCTION_CODE (fndecl))
2982 CASE_FLT_FN (BUILT_IN_FMA):
2983 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
2984 builtin_optab = fma_optab; break;
2985 default:
2986 gcc_unreachable ();
2989 /* Make a suitable register to place result in. */
2990 mode = TYPE_MODE (TREE_TYPE (exp));
2992 /* Before working hard, check whether the instruction is available. */
2993 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
2994 return NULL_RTX;
2996 result = gen_reg_rtx (mode);
2998 /* Always stabilize the argument list. */
2999 CALL_EXPR_ARG (exp, 0) = arg0 = builtin_save_expr (arg0);
3000 CALL_EXPR_ARG (exp, 1) = arg1 = builtin_save_expr (arg1);
3001 CALL_EXPR_ARG (exp, 2) = arg2 = builtin_save_expr (arg2);
3003 op0 = expand_expr (arg0, subtarget, VOIDmode, EXPAND_NORMAL);
3004 op1 = expand_normal (arg1);
3005 op2 = expand_normal (arg2);
3007 start_sequence ();
3009 /* Compute into RESULT.
3010 Set RESULT to wherever the result comes back. */
3011 result = expand_ternary_op (mode, builtin_optab, op0, op1, op2,
3012 result, 0);
3014 /* If we were unable to expand via the builtin, stop the sequence
3015 (without outputting the insns) and call to the library function
3016 with the stabilized argument list. */
3017 if (result == 0)
3019 end_sequence ();
3020 return expand_call (exp, target, target == const0_rtx);
3023 /* Output the entire sequence. */
3024 insns = get_insns ();
3025 end_sequence ();
3026 emit_insn (insns);
3028 return result;
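/* Editor's illustration (not part of the original source): a call like

     double d = __builtin_fma (a, b, c);

   expands through fma_optab above when optab_handler reports an
   instruction for the type's mode; otherwise the stabilized call falls
   through to the fma library function.  */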
3031 /* Expand a call to the builtin sin and cos math functions.
3032 Return NULL_RTX if a normal call should be emitted rather than expanding the
3033 function in-line. EXP is the expression that is a call to the builtin
3034 function; if convenient, the result should be placed in TARGET.
3035 SUBTARGET may be used as the target for computing one of EXP's
3036 operands. */
3038 static rtx
3039 expand_builtin_mathfn_3 (tree exp, rtx target, rtx subtarget)
3041 optab builtin_optab;
3042 rtx op0;
3043 rtx_insn *insns;
3044 tree fndecl = get_callee_fndecl (exp);
3045 machine_mode mode;
3046 tree arg;
3048 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3049 return NULL_RTX;
3051 arg = CALL_EXPR_ARG (exp, 0);
3053 switch (DECL_FUNCTION_CODE (fndecl))
3055 CASE_FLT_FN (BUILT_IN_SIN):
3056 CASE_FLT_FN (BUILT_IN_COS):
3057 builtin_optab = sincos_optab; break;
3058 default:
3059 gcc_unreachable ();
3062 /* Make a suitable register to place result in. */
3063 mode = TYPE_MODE (TREE_TYPE (exp));
3065 /* Check if the sincos insn is available; otherwise fall back
3066 to the sin or cos insn. */
3067 if (optab_handler (builtin_optab, mode) == CODE_FOR_nothing)
3068 switch (DECL_FUNCTION_CODE (fndecl))
3070 CASE_FLT_FN (BUILT_IN_SIN):
3071 builtin_optab = sin_optab; break;
3072 CASE_FLT_FN (BUILT_IN_COS):
3073 builtin_optab = cos_optab; break;
3074 default:
3075 gcc_unreachable ();
3078 /* Before working hard, check whether the instruction is available. */
3079 if (optab_handler (builtin_optab, mode) != CODE_FOR_nothing)
3081 rtx result = gen_reg_rtx (mode);
3083 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3084 need to expand the argument again. This way, we will not perform
3085 side-effects more than once. */
3086 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3088 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
3090 start_sequence ();
3092 /* Compute into RESULT.
3093 Set RESULT to wherever the result comes back. */
3094 if (builtin_optab == sincos_optab)
3096 int ok;
3098 switch (DECL_FUNCTION_CODE (fndecl))
3100 CASE_FLT_FN (BUILT_IN_SIN):
3101 ok = expand_twoval_unop (builtin_optab, op0, 0, result, 0);
3102 break;
3103 CASE_FLT_FN (BUILT_IN_COS):
3104 ok = expand_twoval_unop (builtin_optab, op0, result, 0, 0);
3105 break;
3106 default:
3107 gcc_unreachable ();
3109 gcc_assert (ok);
3111 else
3112 result = expand_unop (mode, builtin_optab, op0, result, 0);
3114 if (result != 0)
3116 /* Output the entire sequence. */
3117 insns = get_insns ();
3118 end_sequence ();
3119 emit_insn (insns);
3120 return result;
3123 /* If we were unable to expand via the builtin, stop the sequence
3124 (without outputting the insns) and call to the library function
3125 with the stabilized argument list. */
3126 end_sequence ();
3129 return expand_call (exp, target, target == const0_rtx);
3132 /* Given an interclass math builtin decl FNDECL and its argument ARG
3133 return an RTL instruction code that implements the functionality.
3134 If that isn't possible or available return CODE_FOR_nothing. */
3136 static enum insn_code
3137 interclass_mathfn_icode (tree arg, tree fndecl)
3139 bool errno_set = false;
3140 optab builtin_optab = unknown_optab;
3141 machine_mode mode;
3143 switch (DECL_FUNCTION_CODE (fndecl))
3145 CASE_FLT_FN (BUILT_IN_ILOGB):
3146 errno_set = true; builtin_optab = ilogb_optab; break;
3147 CASE_FLT_FN (BUILT_IN_ISINF):
3148 builtin_optab = isinf_optab; break;
3149 case BUILT_IN_ISNORMAL:
3150 case BUILT_IN_ISFINITE:
3151 CASE_FLT_FN (BUILT_IN_FINITE):
3152 case BUILT_IN_FINITED32:
3153 case BUILT_IN_FINITED64:
3154 case BUILT_IN_FINITED128:
3155 case BUILT_IN_ISINFD32:
3156 case BUILT_IN_ISINFD64:
3157 case BUILT_IN_ISINFD128:
3158 /* These builtins have no optabs (yet). */
3159 break;
3160 default:
3161 gcc_unreachable ();
3164 /* There's no easy way to detect the case we need to set EDOM. */
3165 if (flag_errno_math && errno_set)
3166 return CODE_FOR_nothing;
3168 /* Optab mode depends on the mode of the input argument. */
3169 mode = TYPE_MODE (TREE_TYPE (arg));
3171 if (builtin_optab)
3172 return optab_handler (builtin_optab, mode);
3173 return CODE_FOR_nothing;
3176 /* Expand a call to one of the builtin math functions that operate on
3177 floating point argument and output an integer result (ilogb, isinf,
3178 isnan, etc).
3179 Return 0 if a normal call should be emitted rather than expanding the
3180 function in-line. EXP is the expression that is a call to the builtin
3181 function; if convenient, the result should be placed in TARGET. */
3183 static rtx
3184 expand_builtin_interclass_mathfn (tree exp, rtx target)
3186 enum insn_code icode = CODE_FOR_nothing;
3187 rtx op0;
3188 tree fndecl = get_callee_fndecl (exp);
3189 machine_mode mode;
3190 tree arg;
3192 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3193 return NULL_RTX;
3195 arg = CALL_EXPR_ARG (exp, 0);
3196 icode = interclass_mathfn_icode (arg, fndecl);
3197 mode = TYPE_MODE (TREE_TYPE (arg));
3199 if (icode != CODE_FOR_nothing)
3201 class expand_operand ops[1];
3202 rtx_insn *last = get_last_insn ();
3203 tree orig_arg = arg;
3205 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3206 need to expand the argument again. This way, we will not perform
3207 side-effects more than once. */
3208 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3210 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3212 if (mode != GET_MODE (op0))
3213 op0 = convert_to_mode (mode, op0, 0);
3215 create_output_operand (&ops[0], target, TYPE_MODE (TREE_TYPE (exp)));
3216 if (maybe_legitimize_operands (icode, 0, 1, ops)
3217 && maybe_emit_unop_insn (icode, ops[0].value, op0, UNKNOWN))
3218 return ops[0].value;
3220 delete_insns_since (last);
3221 CALL_EXPR_ARG (exp, 0) = orig_arg;
3224 return NULL_RTX;
3227 /* Expand a call to the builtin sincos math function.
3228 Return NULL_RTX if a normal call should be emitted rather than expanding the
3229 function in-line. EXP is the expression that is a call to the builtin
3230 function. */
3232 static rtx
3233 expand_builtin_sincos (tree exp)
3235 rtx op0, op1, op2, target1, target2;
3236 machine_mode mode;
3237 tree arg, sinp, cosp;
3238 int result;
3239 location_t loc = EXPR_LOCATION (exp);
3240 tree alias_type, alias_off;
3242 if (!validate_arglist (exp, REAL_TYPE,
3243 POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
3244 return NULL_RTX;
3246 arg = CALL_EXPR_ARG (exp, 0);
3247 sinp = CALL_EXPR_ARG (exp, 1);
3248 cosp = CALL_EXPR_ARG (exp, 2);
3250 /* Make a suitable register to place result in. */
3251 mode = TYPE_MODE (TREE_TYPE (arg));
3253 /* Check if sincos insn is available, otherwise emit the call. */
3254 if (optab_handler (sincos_optab, mode) == CODE_FOR_nothing)
3255 return NULL_RTX;
3257 target1 = gen_reg_rtx (mode);
3258 target2 = gen_reg_rtx (mode);
3260 op0 = expand_normal (arg);
3261 alias_type = build_pointer_type_for_mode (TREE_TYPE (arg), ptr_mode, true);
3262 alias_off = build_int_cst (alias_type, 0);
3263 op1 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3264 sinp, alias_off));
3265 op2 = expand_normal (fold_build2_loc (loc, MEM_REF, TREE_TYPE (arg),
3266 cosp, alias_off));
3268 /* Compute into target1 and target2.
3269 Set TARGET to wherever the result comes back. */
3270 result = expand_twoval_unop (sincos_optab, op0, target2, target1, 0);
3271 gcc_assert (result);
3273 /* Move target1 and target2 to the memory locations indicated
3274 by op1 and op2. */
3275 emit_move_insn (op1, target1);
3276 emit_move_insn (op2, target2);
3278 return const0_rtx;
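/* Editor's illustration (not part of the original source): the call
   form handled above,

     double s, c;
     sincos (x, &s, &c);

   which stores sin (x) through the second argument and cos (x) through
   the third when sincos_optab has a handler; otherwise NULL_RTX is
   returned and a normal library call is emitted.  */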
3281 /* Expand a call to the internal cexpi builtin to the sincos math function.
3282 EXP is the expression that is a call to the builtin function; if convenient,
3283 the result should be placed in TARGET. */
3285 static rtx
3286 expand_builtin_cexpi (tree exp, rtx target)
3288 tree fndecl = get_callee_fndecl (exp);
3289 tree arg, type;
3290 machine_mode mode;
3291 rtx op0, op1, op2;
3292 location_t loc = EXPR_LOCATION (exp);
3294 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3295 return NULL_RTX;
3297 arg = CALL_EXPR_ARG (exp, 0);
3298 type = TREE_TYPE (arg);
3299 mode = TYPE_MODE (TREE_TYPE (arg));
3301 /* Try expanding via a sincos optab, fall back to emitting a libcall
3302 to sincos or cexp. One of them is sure to be available, because
3303 cexpi is only generated when sincos or cexp is available. */
3304 if (optab_handler (sincos_optab, mode) != CODE_FOR_nothing)
3306 op1 = gen_reg_rtx (mode);
3307 op2 = gen_reg_rtx (mode);
3309 op0 = expand_expr (arg, NULL_RTX, VOIDmode, EXPAND_NORMAL);
3311 /* Compute into op1 and op2. */
3312 expand_twoval_unop (sincos_optab, op0, op2, op1, 0);
3314 else if (targetm.libc_has_function (function_sincos, type))
3316 tree call, fn = NULL_TREE;
3317 tree top1, top2;
3318 rtx op1a, op2a;
3320 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3321 fn = builtin_decl_explicit (BUILT_IN_SINCOSF);
3322 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3323 fn = builtin_decl_explicit (BUILT_IN_SINCOS);
3324 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3325 fn = builtin_decl_explicit (BUILT_IN_SINCOSL);
3326 else
3327 gcc_unreachable ();
3329 op1 = assign_temp (TREE_TYPE (arg), 1, 1);
3330 op2 = assign_temp (TREE_TYPE (arg), 1, 1);
3331 op1a = copy_addr_to_reg (XEXP (op1, 0));
3332 op2a = copy_addr_to_reg (XEXP (op2, 0));
3333 top1 = make_tree (build_pointer_type (TREE_TYPE (arg)), op1a);
3334 top2 = make_tree (build_pointer_type (TREE_TYPE (arg)), op2a);
3336 /* Make sure not to fold the sincos call again. */
3337 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3338 expand_normal (build_call_nary (TREE_TYPE (TREE_TYPE (fn)),
3339 call, 3, arg, top1, top2));
3341 else
3343 tree call, fn = NULL_TREE, narg;
3344 tree ctype = build_complex_type (type);
3346 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3347 fn = builtin_decl_explicit (BUILT_IN_CEXPF);
3348 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3349 fn = builtin_decl_explicit (BUILT_IN_CEXP);
3350 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3351 fn = builtin_decl_explicit (BUILT_IN_CEXPL);
3352 else
3353 gcc_unreachable ();
3355 /* If we don't have a decl for cexp create one. This is the
3356 friendliest fallback if the user calls __builtin_cexpi
3357 without full C99 function support on the target. */
3358 if (fn == NULL_TREE)
3360 tree fntype;
3361 const char *name = NULL;
3363 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIF)
3364 name = "cexpf";
3365 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPI)
3366 name = "cexp";
3367 else if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_CEXPIL)
3368 name = "cexpl";
3370 fntype = build_function_type_list (ctype, ctype, NULL_TREE);
3371 fn = build_fn_decl (name, fntype);
3374 narg = fold_build2_loc (loc, COMPLEX_EXPR, ctype,
3375 build_real (type, dconst0), arg);
3377 /* Make sure not to fold the cexp call again. */
3378 call = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fn)), fn);
3379 return expand_expr (build_call_nary (ctype, call, 1, narg),
3380 target, VOIDmode, EXPAND_NORMAL);
3383 /* Now build the proper return type. */
3384 return expand_expr (build2 (COMPLEX_EXPR, build_complex_type (type),
3385 make_tree (TREE_TYPE (arg), op2),
3386 make_tree (TREE_TYPE (arg), op1)),
3387 target, VOIDmode, EXPAND_NORMAL);
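/* Editor's note (not part of the original source): the COMPLEX_EXPR
   built above spells out the identity

     __builtin_cexpi (x) == cos (x) + i * sin (x)

   with op2 providing the real (cosine) part and op1 the imaginary
   (sine) part.  */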
3390 /* Conveniently construct a function call expression. FNDECL names the
3391 function to be called, N is the number of arguments, and the "..."
3392 parameters are the argument expressions. Unlike build_call_expr
3393 this doesn't fold the call, hence it will always return a CALL_EXPR. */
3395 static tree
3396 build_call_nofold_loc (location_t loc, tree fndecl, int n, ...)
3398 va_list ap;
3399 tree fntype = TREE_TYPE (fndecl);
3400 tree fn = build1 (ADDR_EXPR, build_pointer_type (fntype), fndecl);
3402 va_start (ap, n);
3403 fn = build_call_valist (TREE_TYPE (fntype), fn, n, ap);
3404 va_end (ap);
3405 SET_EXPR_LOCATION (fn, loc);
3406 return fn;
3409 /* Expand a call to one of the builtin rounding functions gcc defines
3410 as an extension (lfloor and lceil). As these are gcc extensions we
3411 do not need to worry about setting errno to EDOM.
3412 If expanding via optab fails, lower expression to (int)(floor(x)).
3413 EXP is the expression that is a call to the builtin function;
3414 if convenient, the result should be placed in TARGET. */
3416 static rtx
3417 expand_builtin_int_roundingfn (tree exp, rtx target)
3419 convert_optab builtin_optab;
3420 rtx op0, tmp;
3421 rtx_insn *insns;
3422 tree fndecl = get_callee_fndecl (exp);
3423 enum built_in_function fallback_fn;
3424 tree fallback_fndecl;
3425 machine_mode mode;
3426 tree arg;
3428 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3429 return NULL_RTX;
3431 arg = CALL_EXPR_ARG (exp, 0);
3433 switch (DECL_FUNCTION_CODE (fndecl))
3435 CASE_FLT_FN (BUILT_IN_ICEIL):
3436 CASE_FLT_FN (BUILT_IN_LCEIL):
3437 CASE_FLT_FN (BUILT_IN_LLCEIL):
3438 builtin_optab = lceil_optab;
3439 fallback_fn = BUILT_IN_CEIL;
3440 break;
3442 CASE_FLT_FN (BUILT_IN_IFLOOR):
3443 CASE_FLT_FN (BUILT_IN_LFLOOR):
3444 CASE_FLT_FN (BUILT_IN_LLFLOOR):
3445 builtin_optab = lfloor_optab;
3446 fallback_fn = BUILT_IN_FLOOR;
3447 break;
3449 default:
3450 gcc_unreachable ();
3453 /* Make a suitable register to place result in. */
3454 mode = TYPE_MODE (TREE_TYPE (exp));
3456 target = gen_reg_rtx (mode);
3458 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3459 need to expand the argument again. This way, we will not perform
3460 side-effects more than once. */
3461 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3463 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3465 start_sequence ();
3467 /* Compute into TARGET. */
3468 if (expand_sfix_optab (target, op0, builtin_optab))
3470 /* Output the entire sequence. */
3471 insns = get_insns ();
3472 end_sequence ();
3473 emit_insn (insns);
3474 return target;
3477 /* If we were unable to expand via the builtin, stop the sequence
3478 (without outputting the insns). */
3479 end_sequence ();
3481 /* Fall back to floating point rounding optab. */
3482 fallback_fndecl = mathfn_built_in (TREE_TYPE (arg), fallback_fn);
3484 /* For non-C99 targets we may end up without a fallback fndecl here
3485 if the user called __builtin_lfloor directly. In this case emit
3486 a call to the floor/ceil variants nevertheless. This should result
3487 in the best user experience for targets without full C99 support. */
3488 if (fallback_fndecl == NULL_TREE)
3490 tree fntype;
3491 const char *name = NULL;
3493 switch (DECL_FUNCTION_CODE (fndecl))
3495 case BUILT_IN_ICEIL:
3496 case BUILT_IN_LCEIL:
3497 case BUILT_IN_LLCEIL:
3498 name = "ceil";
3499 break;
3500 case BUILT_IN_ICEILF:
3501 case BUILT_IN_LCEILF:
3502 case BUILT_IN_LLCEILF:
3503 name = "ceilf";
3504 break;
3505 case BUILT_IN_ICEILL:
3506 case BUILT_IN_LCEILL:
3507 case BUILT_IN_LLCEILL:
3508 name = "ceill";
3509 break;
3510 case BUILT_IN_IFLOOR:
3511 case BUILT_IN_LFLOOR:
3512 case BUILT_IN_LLFLOOR:
3513 name = "floor";
3514 break;
3515 case BUILT_IN_IFLOORF:
3516 case BUILT_IN_LFLOORF:
3517 case BUILT_IN_LLFLOORF:
3518 name = "floorf";
3519 break;
3520 case BUILT_IN_IFLOORL:
3521 case BUILT_IN_LFLOORL:
3522 case BUILT_IN_LLFLOORL:
3523 name = "floorl";
3524 break;
3525 default:
3526 gcc_unreachable ();
3529 fntype = build_function_type_list (TREE_TYPE (arg),
3530 TREE_TYPE (arg), NULL_TREE);
3531 fallback_fndecl = build_fn_decl (name, fntype);
3534 exp = build_call_nofold_loc (EXPR_LOCATION (exp), fallback_fndecl, 1, arg);
3536 tmp = expand_normal (exp);
3537 tmp = maybe_emit_group_store (tmp, TREE_TYPE (exp));
3539 /* Truncate the result of floating point optab to integer
3540 via expand_fix (). */
3541 target = gen_reg_rtx (mode);
3542 expand_fix (target, tmp, 0);
3544 return target;
3547 /* Expand a call to one of the builtin math functions doing integer
3548 conversion (lrint).
3549 Return 0 if a normal call should be emitted rather than expanding the
3550 function in-line. EXP is the expression that is a call to the builtin
3551 function; if convenient, the result should be placed in TARGET. */
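/* E.g., __builtin_lrint (x) expands through lrint_optab;
   __builtin_irint (x) first tries the same optab and, failing that,
   emits a call to lrint whose result is then converted to int.  */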
3553 static rtx
3554 expand_builtin_int_roundingfn_2 (tree exp, rtx target)
3556 convert_optab builtin_optab;
3557 rtx op0;
3558 rtx_insn *insns;
3559 tree fndecl = get_callee_fndecl (exp);
3560 tree arg;
3561 machine_mode mode;
3562 enum built_in_function fallback_fn = BUILT_IN_NONE;
3564 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
3565 return NULL_RTX;
3567 arg = CALL_EXPR_ARG (exp, 0);
3569 switch (DECL_FUNCTION_CODE (fndecl))
3571 CASE_FLT_FN (BUILT_IN_IRINT):
3572 fallback_fn = BUILT_IN_LRINT;
3573 gcc_fallthrough ();
3574 CASE_FLT_FN (BUILT_IN_LRINT):
3575 CASE_FLT_FN (BUILT_IN_LLRINT):
3576 builtin_optab = lrint_optab;
3577 break;
3579 CASE_FLT_FN (BUILT_IN_IROUND):
3580 fallback_fn = BUILT_IN_LROUND;
3581 gcc_fallthrough ();
3582 CASE_FLT_FN (BUILT_IN_LROUND):
3583 CASE_FLT_FN (BUILT_IN_LLROUND):
3584 builtin_optab = lround_optab;
3585 break;
3587 default:
3588 gcc_unreachable ();
3591 /* There's no easy way to detect the case we need to set EDOM. */
3592 if (flag_errno_math && fallback_fn == BUILT_IN_NONE)
3593 return NULL_RTX;
3595 /* Make a suitable register to place result in. */
3596 mode = TYPE_MODE (TREE_TYPE (exp));
3598 /* There's no easy way to detect the case we need to set EDOM. */
3599 if (!flag_errno_math)
3601 rtx result = gen_reg_rtx (mode);
3603 /* Wrap the computation of the argument in a SAVE_EXPR, as we may
3604 need to expand the argument again. This way, we will not perform
3605 side-effects more than once. */
3606 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
3608 op0 = expand_expr (arg, NULL, VOIDmode, EXPAND_NORMAL);
3610 start_sequence ();
3612 if (expand_sfix_optab (result, op0, builtin_optab))
3614 /* Output the entire sequence. */
3615 insns = get_insns ();
3616 end_sequence ();
3617 emit_insn (insns);
3618 return result;
3621 /* If we were unable to expand via the builtin, stop the sequence
3622 (without outputting the insns) and call to the library function
3623 with the stabilized argument list. */
3624 end_sequence ();
3627 if (fallback_fn != BUILT_IN_NONE)
3629 /* Fall back to rounding to long int. Use implicit_p 0 - for non-C99
3630 targets, (int) round (x) should never be transformed into
3631 BUILT_IN_IROUND and if __builtin_iround is called directly, emit
3632 a call to lround in the hope that the target provides at least some
3633 C99 functions. This should result in the best user experience for
3634 targets without full C99 support. */
3635 tree fallback_fndecl = mathfn_built_in_1
3636 (TREE_TYPE (arg), as_combined_fn (fallback_fn), 0);
3638 exp = build_call_nofold_loc (EXPR_LOCATION (exp),
3639 fallback_fndecl, 1, arg);
3641 target = expand_call (exp, NULL_RTX, target == const0_rtx);
3642 target = maybe_emit_group_store (target, TREE_TYPE (exp));
3643 return convert_to_mode (mode, target, 0);
3646 return expand_call (exp, target, target == const0_rtx);
3649 /* Expand a call to the powi built-in mathematical function. Return NULL_RTX if
3650 a normal call should be emitted rather than expanding the function
3651 in-line. EXP is the expression that is a call to the builtin
3652 function; if convenient, the result should be placed in TARGET. */
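/* Unlike the rounding functions above there is no insn pattern to try
   first: the expansion below always emits a libgcc libcall through
   powi_optab's libfunc.  */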
3654 static rtx
3655 expand_builtin_powi (tree exp, rtx target)
3657 tree arg0, arg1;
3658 rtx op0, op1;
3659 machine_mode mode;
3660 machine_mode mode2;
3662 if (! validate_arglist (exp, REAL_TYPE, INTEGER_TYPE, VOID_TYPE))
3663 return NULL_RTX;
3665 arg0 = CALL_EXPR_ARG (exp, 0);
3666 arg1 = CALL_EXPR_ARG (exp, 1);
3667 mode = TYPE_MODE (TREE_TYPE (exp));
3669 /* Emit a libcall to libgcc. */
3671 /* Mode of the 2nd argument must match that of an int. */
3672 mode2 = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
3674 if (target == NULL_RTX)
3675 target = gen_reg_rtx (mode);
3677 op0 = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
3678 if (GET_MODE (op0) != mode)
3679 op0 = convert_to_mode (mode, op0, 0);
3680 op1 = expand_expr (arg1, NULL_RTX, mode2, EXPAND_NORMAL);
3681 if (GET_MODE (op1) != mode2)
3682 op1 = convert_to_mode (mode2, op1, 0);
3684 target = emit_library_call_value (optab_libfunc (powi_optab, mode),
3685 target, LCT_CONST, mode,
3686 op0, mode, op1, mode2);
3688 return target;
3691 /* Expand expression EXP which is a call to the strlen builtin. Return
3692 NULL_RTX if we failed and the caller should emit a normal call, otherwise
3693 try to get the result in TARGET, if convenient. */
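/* The expansion tries a compile-time answer from c_strlen first and
   only then the target's strlen<mode> pattern, if any; when neither
   applies the caller emits a normal call.  */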
3695 static rtx
3696 expand_builtin_strlen (tree exp, rtx target,
3697 machine_mode target_mode)
3699 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
3700 return NULL_RTX;
3702 tree src = CALL_EXPR_ARG (exp, 0);
3703 if (!check_read_access (exp, src))
3704 return NULL_RTX;
3706 /* If the length can be computed at compile-time, return it. */
3707 if (tree len = c_strlen (src, 0))
3708 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3710 /* If the length can be computed at compile-time and is a constant
3711 integer, but there are side-effects in src, evaluate
3712 src for side-effects, then return len.
3713 E.g. x = strlen (i++ ? "xfoo" + 1 : "bar");
3714 can be optimized into: i++; x = 3; */
3715 tree len = c_strlen (src, 1);
3716 if (len && TREE_CODE (len) == INTEGER_CST)
3718 expand_expr (src, const0_rtx, VOIDmode, EXPAND_NORMAL);
3719 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3722 unsigned int align = get_pointer_alignment (src) / BITS_PER_UNIT;
3724 /* If SRC is not a pointer type, don't do this operation inline. */
3725 if (align == 0)
3726 return NULL_RTX;
3728 /* Bail out if we can't compute strlen in the right mode. */
3729 machine_mode insn_mode;
3730 enum insn_code icode = CODE_FOR_nothing;
3731 FOR_EACH_MODE_FROM (insn_mode, target_mode)
3733 icode = optab_handler (strlen_optab, insn_mode);
3734 if (icode != CODE_FOR_nothing)
3735 break;
3737 if (insn_mode == VOIDmode)
3738 return NULL_RTX;
3740 /* Make a place to hold the source address. We will not expand
3741 the actual source until we are sure that the expansion will
3742 not fail -- there are trees that cannot be expanded twice. */
3743 rtx src_reg = gen_reg_rtx (Pmode);
3745 /* Mark the beginning of the strlen sequence so we can emit the
3746 source operand later. */
3747 rtx_insn *before_strlen = get_last_insn ();
3749 class expand_operand ops[4];
3750 create_output_operand (&ops[0], target, insn_mode);
3751 create_fixed_operand (&ops[1], gen_rtx_MEM (BLKmode, src_reg));
3752 create_integer_operand (&ops[2], 0);
3753 create_integer_operand (&ops[3], align);
3754 if (!maybe_expand_insn (icode, 4, ops))
3755 return NULL_RTX;
3757 /* Check to see if the argument was declared attribute nonstring
3758 and if so, issue a warning since at this point it's not known
3759 to be nul-terminated. */
3760 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
3762 /* Now that we are assured of success, expand the source. */
3763 start_sequence ();
3764 rtx pat = expand_expr (src, src_reg, Pmode, EXPAND_NORMAL);
3765 if (pat != src_reg)
3767 #ifdef POINTERS_EXTEND_UNSIGNED
3768 if (GET_MODE (pat) != Pmode)
3769 pat = convert_to_mode (Pmode, pat,
3770 POINTERS_EXTEND_UNSIGNED);
3771 #endif
3772 emit_move_insn (src_reg, pat);
3774 pat = get_insns ();
3775 end_sequence ();
3777 if (before_strlen)
3778 emit_insn_after (pat, before_strlen);
3779 else
3780 emit_insn_before (pat, get_insns ());
3782 /* Return the value in the proper mode for this function. */
3783 if (GET_MODE (ops[0].value) == target_mode)
3784 target = ops[0].value;
3785 else if (target != 0)
3786 convert_move (target, ops[0].value, 0);
3787 else
3788 target = convert_to_mode (target_mode, ops[0].value, 0);
3790 return target;
3793 /* Expand call EXP to the strnlen built-in, returning the result
3794 and setting it in TARGET if convenient. Return NULL_RTX on failure. */
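/* E.g., with a known string length and a constant bound the result
   folds to MIN (len, bound); otherwise range information on the bound
   is needed below to avoid expanding the call.  */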
3796 static rtx
3797 expand_builtin_strnlen (tree exp, rtx target, machine_mode target_mode)
3799 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
3800 return NULL_RTX;
3802 tree src = CALL_EXPR_ARG (exp, 0);
3803 tree bound = CALL_EXPR_ARG (exp, 1);
3805 if (!bound)
3806 return NULL_RTX;
3808 check_read_access (exp, src, bound);
3810 location_t loc = UNKNOWN_LOCATION;
3811 if (EXPR_HAS_LOCATION (exp))
3812 loc = EXPR_LOCATION (exp);
3814 /* FIXME: Change c_strlen() to return sizetype instead of ssizetype
3815 so these conversions aren't necessary. */
3816 c_strlen_data lendata = { };
3817 tree len = c_strlen (src, 0, &lendata, 1);
3818 if (len)
3819 len = fold_convert_loc (loc, TREE_TYPE (bound), len);
3821 if (TREE_CODE (bound) == INTEGER_CST)
3823 if (!len)
3824 return NULL_RTX;
3826 len = fold_build2_loc (loc, MIN_EXPR, size_type_node, len, bound);
3827 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3830 if (TREE_CODE (bound) != SSA_NAME)
3831 return NULL_RTX;
3833 wide_int min, max;
3834 enum value_range_kind rng = get_range_info (bound, &min, &max);
3835 if (rng != VR_RANGE)
3836 return NULL_RTX;
3838 if (!len || TREE_CODE (len) != INTEGER_CST)
3840 bool exact;
3841 lendata.decl = unterminated_array (src, &len, &exact);
3842 if (!lendata.decl)
3843 return NULL_RTX;
3846 if (lendata.decl)
3847 return NULL_RTX;
3849 if (wi::gtu_p (min, wi::to_wide (len)))
3850 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3852 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, bound);
3853 return expand_expr (len, target, target_mode, EXPAND_NORMAL);
3856 /* Callback routine for store_by_pieces. Read GET_MODE_BITSIZE (MODE)
3857 bytes from bytes at DATA + OFFSET and return it reinterpreted as
3858 a target constant. */
3860 static rtx
3861 builtin_memcpy_read_str (void *data, void *, HOST_WIDE_INT offset,
3862 scalar_int_mode mode)
3864 /* The REPresentation pointed to by DATA need not be a nul-terminated
3865 string but the caller guarantees it's large enough for MODE. */
3866 const char *rep = (const char *) data;
3868 return c_readstr (rep + offset, mode, /*nul_terminated=*/false);
3871 /* LEN specifies the length of the block of the memcpy/memset operation.
3872 Figure out its range and put it into MIN_SIZE/MAX_SIZE.
3873 In some cases we can make a very likely guess about the maximum size,
3874 which we then set into PROBABLE_MAX_SIZE. */
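/* E.g., for an unsigned N known via VR_RANGE to be at most 64, both
   MAX_SIZE and PROBABLE_MAX_SIZE are narrowed to 64 below.  */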
3876 static void
3877 determine_block_size (tree len, rtx len_rtx,
3878 unsigned HOST_WIDE_INT *min_size,
3879 unsigned HOST_WIDE_INT *max_size,
3880 unsigned HOST_WIDE_INT *probable_max_size)
3882 if (CONST_INT_P (len_rtx))
3884 *min_size = *max_size = *probable_max_size = UINTVAL (len_rtx);
3885 return;
3887 else
3889 wide_int min, max;
3890 enum value_range_kind range_type = VR_UNDEFINED;
3892 /* Determine bounds from the type. */
3893 if (tree_fits_uhwi_p (TYPE_MIN_VALUE (TREE_TYPE (len))))
3894 *min_size = tree_to_uhwi (TYPE_MIN_VALUE (TREE_TYPE (len)));
3895 else
3896 *min_size = 0;
3897 if (tree_fits_uhwi_p (TYPE_MAX_VALUE (TREE_TYPE (len))))
3898 *probable_max_size = *max_size
3899 = tree_to_uhwi (TYPE_MAX_VALUE (TREE_TYPE (len)));
3900 else
3901 *probable_max_size = *max_size = GET_MODE_MASK (GET_MODE (len_rtx));
3903 if (TREE_CODE (len) == SSA_NAME)
3904 range_type = get_range_info (len, &min, &max);
3905 if (range_type == VR_RANGE)
3907 if (wi::fits_uhwi_p (min) && *min_size < min.to_uhwi ())
3908 *min_size = min.to_uhwi ();
3909 if (wi::fits_uhwi_p (max) && *max_size > max.to_uhwi ())
3910 *probable_max_size = *max_size = max.to_uhwi ();
3912 else if (range_type == VR_ANTI_RANGE)
3914 /* Code like
3916 int n;
3917 if (n < 100)
3918 memcpy (a, b, n)
3920 produces an anti-range allowing negative values of N. We can still
3921 use the information and guess that N is not negative. */
3923 if (!wi::leu_p (max, 1 << 30) && wi::fits_uhwi_p (min))
3924 *probable_max_size = min.to_uhwi () - 1;
3927 gcc_checking_assert (*max_size <=
3928 (unsigned HOST_WIDE_INT)
3929 GET_MODE_MASK (GET_MODE (len_rtx)));
3932 /* Issue a warning OPT for a bounded call EXP with a bound in BNDRNG
3933 accessing an object of size SIZE. */
3935 static bool
3936 maybe_warn_for_bound (int opt, location_t loc, tree exp, tree func,
3937 tree bndrng[2], tree size, const access_data *pad = NULL)
3939 if (!bndrng[0] || TREE_NO_WARNING (exp))
3940 return false;
3942 tree maxobjsize = max_object_size ();
3944 bool warned = false;
3946 if (opt == OPT_Wstringop_overread)
3948 bool maybe = pad && pad->src.phi ();
3950 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
3952 if (bndrng[0] == bndrng[1])
3953 warned = (func
3954 ? warning_at (loc, opt,
3955 (maybe
3956 ? G_("%K%qD specified bound %E may "
3957 "exceed maximum object size %E")
3958 : G_("%K%qD specified bound %E "
3959 "exceeds maximum object size %E")),
3960 exp, func, bndrng[0], maxobjsize)
3961 : warning_at (loc, opt,
3962 (maybe
3963 ? G_("%Kspecified bound %E may "
3964 "exceed maximum object size %E")
3965 : G_("%Kspecified bound %E "
3966 "exceeds maximum object size %E")),
3967 exp, bndrng[0], maxobjsize));
3968 else
3969 warned = (func
3970 ? warning_at (loc, opt,
3971 (maybe
3972 ? G_("%K%qD specified bound [%E, %E] may "
3973 "exceed maximum object size %E")
3974 : G_("%K%qD specified bound [%E, %E] "
3975 "exceeds maximum object size %E")),
3976 exp, func,
3977 bndrng[0], bndrng[1], maxobjsize)
3978 : warning_at (loc, opt,
3979 (maybe
3980 ? G_("%Kspecified bound [%E, %E] may "
3981 "exceed maximum object size %E")
3982 : G_("%Kspecified bound [%E, %E] "
3983 "exceeds maximum object size %E")),
3984 exp, bndrng[0], bndrng[1], maxobjsize));
3986 else if (!size || tree_int_cst_le (bndrng[0], size))
3987 return false;
3988 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
3989 warned = (func
3990 ? warning_at (loc, opt,
3991 (maybe
3992 ? G_("%K%qD specified bound %E may exceed "
3993 "source size %E")
3994 : G_("%K%qD specified bound %E exceeds "
3995 "source size %E")),
3996 exp, func, bndrng[0], size)
3997 : warning_at (loc, opt,
3998 (maybe
3999 ? G_("%Kspecified bound %E may exceed "
4000 "source size %E")
4001 : G_("%Kspecified bound %E exceeds "
4002 "source size %E")),
4003 exp, bndrng[0], size));
4004 else
4005 warned = (func
4006 ? warning_at (loc, opt,
4007 (maybe
4008 ? G_("%K%qD specified bound [%E, %E] may "
4009 "exceed source size %E")
4010 : G_("%K%qD specified bound [%E, %E] exceeds "
4011 "source size %E")),
4012 exp, func, bndrng[0], bndrng[1], size)
4013 : warning_at (loc, opt,
4014 (maybe
4015 ? G_("%Kspecified bound [%E, %E] may exceed "
4016 "source size %E")
4017 : G_("%Kspecified bound [%E, %E] exceeds "
4018 "source size %E")),
4019 exp, bndrng[0], bndrng[1], size));
4020 if (warned)
4022 if (pad && pad->src.ref)
4024 if (DECL_P (pad->src.ref))
4025 inform (DECL_SOURCE_LOCATION (pad->src.ref),
4026 "source object declared here");
4027 else if (EXPR_HAS_LOCATION (pad->src.ref))
4028 inform (EXPR_LOCATION (pad->src.ref),
4029 "source object allocated here");
4031 TREE_NO_WARNING (exp) = true;
4034 return warned;
4037 bool maybe = pad && pad->dst.phi ();
4038 if (tree_int_cst_lt (maxobjsize, bndrng[0]))
4040 if (bndrng[0] == bndrng[1])
4041 warned = (func
4042 ? warning_at (loc, opt,
4043 (maybe
4044 ? G_("%K%qD specified size %E may "
4045 "exceed maximum object size %E")
4046 : G_("%K%qD specified size %E "
4047 "exceeds maximum object size %E")),
4048 exp, func, bndrng[0], maxobjsize)
4049 : warning_at (loc, opt,
4050 (maybe
4051 ? G_("%Kspecified size %E may exceed "
4052 "maximum object size %E")
4053 : G_("%Kspecified size %E exceeds "
4054 "maximum object size %E")),
4055 exp, bndrng[0], maxobjsize));
4056 else
4057 warned = (func
4058 ? warning_at (loc, opt,
4059 (maybe
4060 ? G_("%K%qD specified size between %E and %E "
4061 "may exceed maximum object size %E")
4062 : G_("%K%qD specified size between %E and %E "
4063 "exceeds maximum object size %E")),
4064 exp, func,
4065 bndrng[0], bndrng[1], maxobjsize)
4066 : warning_at (loc, opt,
4067 (maybe
4068 ? G_("%Kspecified size between %E and %E "
4069 "may exceed maximum object size %E")
4070 : G_("%Kspecified size between %E and %E "
4071 "exceeds maximum object size %E")),
4072 exp, bndrng[0], bndrng[1], maxobjsize));
4074 else if (!size || tree_int_cst_le (bndrng[0], size))
4075 return false;
4076 else if (tree_int_cst_equal (bndrng[0], bndrng[1]))
4077 warned = (func
4078 ? warning_at (loc, OPT_Wstringop_overflow_,
4079 (maybe
4080 ? G_("%K%qD specified bound %E may exceed "
4081 "destination size %E")
4082 : G_("%K%qD specified bound %E exceeds "
4083 "destination size %E")),
4084 exp, func, bndrng[0], size)
4085 : warning_at (loc, OPT_Wstringop_overflow_,
4086 (maybe
4087 ? G_("%Kspecified bound %E may exceed "
4088 "destination size %E")
4089 : G_("%Kspecified bound %E exceeds "
4090 "destination size %E")),
4091 exp, bndrng[0], size));
4092 else
4093 warned = (func
4094 ? warning_at (loc, OPT_Wstringop_overflow_,
4095 (maybe
4096 ? G_("%K%qD specified bound [%E, %E] may exceed "
4097 "destination size %E")
4098 : G_("%K%qD specified bound [%E, %E] exceeds "
4099 "destination size %E")),
4100 exp, func, bndrng[0], bndrng[1], size)
4101 : warning_at (loc, OPT_Wstringop_overflow_,
4102 (maybe
4103 ? G_("%Kspecified bound [%E, %E] may exceed "
4104 "destination size %E")
4105 : G_("%Kspecified bound [%E, %E] exceeds "
4106 "destination size %E")),
4107 exp, bndrng[0], bndrng[1], size));
4109 if (warned)
4111 if (pad && pad->dst.ref)
4113 if (DECL_P (pad->dst.ref))
4114 inform (DECL_SOURCE_LOCATION (pad->dst.ref),
4115 "destination object declared here");
4116 else if (EXPR_HAS_LOCATION (pad->dst.ref))
4117 inform (EXPR_LOCATION (pad->dst.ref),
4118 "destination object allocated here");
4120 TREE_NO_WARNING (exp) = true;
4123 return warned;
4126 /* For an expression EXP issue an access warning controlled by option OPT
4127 with access to a region SIZE bytes in size in the RANGE of sizes.
4128 WRITE is true for a write access, READ for a read access, and neither
4129 for a call that may or may not perform an access but for which the
4130 range is expected to be valid.
4131 Returns true when a warning has been issued. */
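/* Four diagnostic shapes follow: one for accesses that both read and
   write, one for writes, one for reads, and a final one for calls that
   merely expect the region to be valid.  */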
4133 static bool
4134 warn_for_access (location_t loc, tree func, tree exp, int opt, tree range[2],
4135 tree size, bool write, bool read, bool maybe)
4137 bool warned = false;
4139 if (write && read)
4141 if (tree_int_cst_equal (range[0], range[1]))
4142 warned = (func
4143 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4144 (maybe
4145 ? G_("%K%qD may access %E byte in a region "
4146 "of size %E")
4147 : G_("%K%qD accessing %E byte in a region "
4148 "of size %E")),
4149 (maybe
4150 ? G_ ("%K%qD may access %E bytes in a region "
4151 "of size %E")
4152 : G_ ("%K%qD accessing %E bytes in a region "
4153 "of size %E")),
4154 exp, func, range[0], size)
4155 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4156 (maybe
4157 ? G_("%Kmay access %E byte in a region "
4158 "of size %E")
4159 : G_("%Kaccessing %E byte in a region "
4160 "of size %E")),
4161 (maybe
4162 ? G_("%Kmay access %E bytes in a region "
4163 "of size %E")
4164 : G_("%Kaccessing %E bytes in a region "
4165 "of size %E")),
4166 exp, range[0], size));
4167 else if (tree_int_cst_sign_bit (range[1]))
4169 /* Avoid printing the upper bound if it's invalid. */
4170 warned = (func
4171 ? warning_at (loc, opt,
4172 (maybe
4173 ? G_("%K%qD may access %E or more bytes "
4174 "in a region of size %E")
4175 : G_("%K%qD accessing %E or more bytes "
4176 "in a region of size %E")),
4177 exp, func, range[0], size)
4178 : warning_at (loc, opt,
4179 (maybe
4180 ? G_("%Kmay access %E or more bytes "
4181 "in a region of size %E")
4182 : G_("%Kaccessing %E or more bytes "
4183 "in a region of size %E")),
4184 exp, range[0], size));
4186 else
4187 warned = (func
4188 ? warning_at (loc, opt,
4189 (maybe
4190 ? G_("%K%qD may access between %E and %E "
4191 "bytes in a region of size %E")
4192 : G_("%K%qD accessing between %E and %E "
4193 "bytes in a region of size %E")),
4194 exp, func, range[0], range[1],
4195 size)
4196 : warning_at (loc, opt,
4197 (maybe
4198 ? G_("%Kmay access between %E and %E bytes "
4199 "in a region of size %E")
4200 : G_("%Kaccessing between %E and %E bytes "
4201 "in a region of size %E")),
4202 exp, range[0], range[1],
4203 size));
4204 return warned;
4207 if (write)
4209 if (tree_int_cst_equal (range[0], range[1]))
4210 warned = (func
4211 ? warning_n (loc, opt, tree_to_uhwi (range[0]),
4212 (maybe
4213 ? G_("%K%qD may write %E byte into a region "
4214 "of size %E")
4215 : G_("%K%qD writing %E byte into a region "
4216 "of size %E overflows the destination")),
4217 (maybe
4218 ? G_("%K%qD may write %E bytes into a region "
4219 "of size %E")
4220 : G_("%K%qD writing %E bytes into a region "
4221 "of size %E overflows the destination")),
4222 exp, func, range[0], size)
4223 : warning_n (loc, opt, tree_to_uhwi (range[0]),
4224 (maybe
4225 ? G_("%Kmay write %E byte into a region "
4226 "of size %E")
4227 : G_("%Kwriting %E byte into a region "
4228 "of size %E overflows the destination")),
4229 (maybe
4230 ? G_("%Kmay write %E bytes into a region "
4231 "of size %E")
4232 : G_("%Kwriting %E bytes into a region "
4233 "of size %E overflows the destination")),
4234 exp, range[0], size));
4235 else if (tree_int_cst_sign_bit (range[1]))
4237 /* Avoid printing the upper bound if it's invalid. */
4238 warned = (func
4239 ? warning_at (loc, opt,
4240 (maybe
4241 ? G_("%K%qD may write %E or more bytes "
4242 "into a region of size %E")
4243 : G_("%K%qD writing %E or more bytes "
4244 "into a region of size %E overflows "
4245 "the destination")),
4246 exp, func, range[0], size)
4247 : warning_at (loc, opt,
4248 (maybe
4249 ? G_("%Kmay write %E or more bytes into "
4250 "a region of size %E")
4251 : G_("%Kwriting %E or more bytes into "
4252 "a region of size %E overflows "
4253 "the destination")),
4254 exp, range[0], size));
4256 else
4257 warned = (func
4258 ? warning_at (loc, opt,
4259 (maybe
4260 ? G_("%K%qD may write between %E and %E bytes "
4261 "into a region of size %E")
4262 : G_("%K%qD writing between %E and %E bytes "
4263 "into a region of size %E overflows "
4264 "the destination")),
4265 exp, func, range[0], range[1],
4266 size)
4267 : warning_at (loc, opt,
4268 (maybe
4269 ? G_("%Kmay write between %E and %E bytes "
4270 "into a region of size %E")
4271 : G_("%Kwriting between %E and %E bytes "
4272 "into a region of size %E overflows "
4273 "the destination")),
4274 exp, range[0], range[1],
4275 size));
4276 return warned;
4279 if (read)
4281 if (tree_int_cst_equal (range[0], range[1]))
4282 warned = (func
4283 ? warning_n (loc, OPT_Wstringop_overread,
4284 tree_to_uhwi (range[0]),
4285 (maybe
4286 ? G_("%K%qD may read %E byte from a region "
4287 "of size %E")
4288 : G_("%K%qD reading %E byte from a region "
4289 "of size %E")),
4290 (maybe
4291 ? G_("%K%qD may read %E bytes from a region "
4292 "of size %E")
4293 : G_("%K%qD reading %E bytes from a region "
4294 "of size %E")),
4295 exp, func, range[0], size)
4296 : warning_n (loc, OPT_Wstringop_overread,
4297 tree_to_uhwi (range[0]),
4298 (maybe
4299 ? G_("%Kmay read %E byte from a region "
4300 "of size %E")
4301 : G_("%Kreading %E byte from a region "
4302 "of size %E")),
4303 (maybe
4304 ? G_("%Kmay read %E bytes from a region "
4305 "of size %E")
4306 : G_("%Kreading %E bytes from a region "
4307 "of size %E")),
4308 exp, range[0], size));
4309 else if (tree_int_cst_sign_bit (range[1]))
4311 /* Avoid printing the upper bound if it's invalid. */
4312 warned = (func
4313 ? warning_at (loc, OPT_Wstringop_overread,
4314 (maybe
4315 ? G_("%K%qD may read %E or more bytes "
4316 "from a region of size %E")
4317 : G_("%K%qD reading %E or more bytes "
4318 "from a region of size %E")),
4319 exp, func, range[0], size)
4320 : warning_at (loc, OPT_Wstringop_overread,
4321 (maybe
4322 ? G_("%Kmay read %E or more bytes "
4323 "from a region of size %E")
4324 : G_("%Kreading %E or more bytes "
4325 "from a region of size %E")),
4326 exp, range[0], size));
4328 else
4329 warned = (func
4330 ? warning_at (loc, OPT_Wstringop_overread,
4331 (maybe
4332 ? G_("%K%qD may read between %E and %E bytes "
4333 "from a region of size %E")
4334 : G_("%K%qD reading between %E and %E bytes "
4335 "from a region of size %E")),
4336 exp, func, range[0], range[1], size)
4337 : warning_at (loc, OPT_Wstringop_overread,
4338 (maybe
4339 ? G_("%Kmay read between %E and %E bytes "
4340 "from a region of size %E")
4341 : G_("%Kreading between %E and %E bytes "
4342 "from a region of size %E")),
4343 exp, range[0], range[1], size));
4345 if (warned)
4346 TREE_NO_WARNING (exp) = true;
4348 return warned;
4351 if (tree_int_cst_equal (range[0], range[1]))
4353 warned = (func
4354 ? warning_n (loc, OPT_Wstringop_overread,
4355 tree_to_uhwi (range[0]),
4356 "%K%qD expecting %E byte in a region of size %E",
4357 "%K%qD expecting %E bytes in a region of size %E",
4358 exp, func, range[0], size)
4359 : warning_n (loc, OPT_Wstringop_overread,
4360 tree_to_uhwi (range[0]),
4361 "%Kexpecting %E byte in a region of size %E",
4362 "%Kexpecting %E bytes in a region of size %E",
4363 exp, range[0], size));
4364 else if (tree_int_cst_sign_bit (range[1]))
4366 /* Avoid printing the upper bound if it's invalid. */
4367 warned = (func
4368 ? warning_at (loc, OPT_Wstringop_overread,
4369 "%K%qD expecting %E or more bytes in a region "
4370 "of size %E",
4371 exp, func, range[0], size)
4372 : warning_at (loc, OPT_Wstringop_overread,
4373 "%Kexpecting %E or more bytes in a region "
4374 "of size %E",
4375 exp, range[0], size));
4377 else
4378 warned = (func
4379 ? warning_at (loc, OPT_Wstringop_overread,
4380 "%K%qD expecting between %E and %E bytes in "
4381 "a region of size %E",
4382 exp, func, range[0], range[1], size)
4383 : warning_at (loc, OPT_Wstringop_overread,
4384 "%Kexpecting between %E and %E bytes in "
4385 "a region of size %E",
4386 exp, range[0], range[1], size));
4388 if (warned)
4389 TREE_NO_WARNING (exp) = true;
4391 return warned;
4394 /* Issue one inform message describing each target of an access REF.
4395 MODE indicates whether the access is a write or a read. */
4397 void
4398 access_ref::inform_access (access_mode mode) const
4400 const access_ref &aref = *this;
4401 if (!aref.ref)
4402 return;
4404 if (aref.phi ())
4406 /* Set MAXREF to refer to the largest object and fill ALL_REFS
4407 with data for all objects referenced by the PHI arguments. */
4408 access_ref maxref;
4409 auto_vec<access_ref> all_refs;
4410 if (!get_ref (&all_refs, &maxref))
4411 return;
4413 /* Except for MAXREF, the rest of the arguments' offsets need not
4414 reflect one added to the PHI itself. Determine the latter from
4415 MAXREF on which the result is based. */
4416 const offset_int orng[] =
4418 offrng[0] - maxref.offrng[0],
4419 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
4422 /* Add the final PHI's offset to that of each of the arguments
4423 and recurse to issue an inform message for it. */
4424 for (unsigned i = 0; i != all_refs.length (); ++i)
4426 /* Skip any PHIs; those could lead to infinite recursion. */
4427 if (all_refs[i].phi ())
4428 continue;
4430 all_refs[i].add_offset (orng[0], orng[1]);
4431 all_refs[i].inform_access (mode);
4433 return;
4436 /* Convert offset range and avoid including a zero range since it
4437 isn't necessarily meaningful. */
4438 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
4439 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
4440 HOST_WIDE_INT minoff;
4441 HOST_WIDE_INT maxoff = diff_max;
4442 if (wi::fits_shwi_p (aref.offrng[0]))
4443 minoff = aref.offrng[0].to_shwi ();
4444 else
4445 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
4447 if (wi::fits_shwi_p (aref.offrng[1]))
4448 maxoff = aref.offrng[1].to_shwi ();
4450 if (maxoff <= diff_min || maxoff >= diff_max)
4451 /* Avoid mentioning an upper bound that's equal to or in excess
4452 of the maximum of ptrdiff_t. */
4453 maxoff = minoff;
4455 /* Convert size range and always include it since all sizes are
4456 meaningful. */
4457 unsigned long long minsize = 0, maxsize = 0;
4458 if (wi::fits_shwi_p (aref.sizrng[0])
4459 && wi::fits_shwi_p (aref.sizrng[1]))
4461 minsize = aref.sizrng[0].to_shwi ();
4462 maxsize = aref.sizrng[1].to_shwi ();
4465 /* SIZRNG doesn't necessarily have the same range as the allocation
4466 size determined by gimple_call_alloc_size (). */
4467 char sizestr[80];
4468 if (minsize == maxsize)
4469 sprintf (sizestr, "%llu", minsize);
4470 else
4471 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
4473 char offstr[80];
4474 if (minoff == 0
4475 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
4476 offstr[0] = '\0';
4477 else if (minoff == maxoff)
4478 sprintf (offstr, "%lli", (long long) minoff);
4479 else
4480 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
4482 location_t loc = UNKNOWN_LOCATION;
4484 tree ref = this->ref;
4485 tree allocfn = NULL_TREE;
4486 if (TREE_CODE (ref) == SSA_NAME)
4488 gimple *stmt = SSA_NAME_DEF_STMT (ref);
4489 if (is_gimple_call (stmt))
4491 loc = gimple_location (stmt);
4492 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4494 /* Strip the SSA_NAME suffix from the variable name and
4495 recreate an identifier with the VLA's original name. */
4496 ref = gimple_call_lhs (stmt);
4497 if (SSA_NAME_IDENTIFIER (ref))
4499 ref = SSA_NAME_IDENTIFIER (ref);
4500 const char *id = IDENTIFIER_POINTER (ref);
4501 size_t len = strcspn (id, ".$");
4502 if (!len)
4503 len = strlen (id);
4504 ref = get_identifier_with_length (id, len);
4507 else
4509 /* Except for VLAs, retrieve the allocation function. */
4510 allocfn = gimple_call_fndecl (stmt);
4511 if (!allocfn)
4512 allocfn = gimple_call_fn (stmt);
4513 if (TREE_CODE (allocfn) == SSA_NAME)
4515 /* For an ALLOC_CALL via a function pointer make a small
4516 effort to determine the destination of the pointer. */
4517 gimple *def = SSA_NAME_DEF_STMT (allocfn);
4518 if (gimple_assign_single_p (def))
4520 tree rhs = gimple_assign_rhs1 (def);
4521 if (DECL_P (rhs))
4522 allocfn = rhs;
4523 else if (TREE_CODE (rhs) == COMPONENT_REF)
4524 allocfn = TREE_OPERAND (rhs, 1);
4529 else if (gimple_nop_p (stmt))
4530 /* Handle DECL_PARM below. */
4531 ref = SSA_NAME_VAR (ref);
4534 if (DECL_P (ref))
4535 loc = DECL_SOURCE_LOCATION (ref);
4536 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
4537 loc = EXPR_LOCATION (ref);
4538 else if (TREE_CODE (ref) != IDENTIFIER_NODE
4539 && TREE_CODE (ref) != SSA_NAME)
4540 return;
4542 if (mode == access_read_write || mode == access_write_only)
4544 if (allocfn == NULL_TREE)
4546 if (*offstr)
4547 inform (loc, "at offset %s into destination object %qE of size %s",
4548 offstr, ref, sizestr);
4549 else
4550 inform (loc, "destination object %qE of size %s", ref, sizestr);
4551 return;
4554 if (*offstr)
4555 inform (loc,
4556 "at offset %s into destination object of size %s "
4557 "allocated by %qE", offstr, sizestr, allocfn);
4558 else
4559 inform (loc, "destination object of size %s allocated by %qE",
4560 sizestr, allocfn);
4561 return;
4564 if (allocfn == NULL_TREE)
4566 if (*offstr)
4567 inform (loc, "at offset %s into source object %qE of size %s",
4568 offstr, ref, sizestr);
4569 else
4570 inform (loc, "source object %qE of size %s", ref, sizestr);
4572 return;
4575 if (*offstr)
4576 inform (loc,
4577 "at offset %s into source object of size %s allocated by %qE",
4578 offstr, sizestr, allocfn);
4579 else
4580 inform (loc, "source object of size %s allocated by %qE",
4581 sizestr, allocfn);
4584 /* Helper to set RANGE to the range of BOUND if it's nonnull, bounded
4585 by BNDRNG if nonnull and valid. */
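/* E.g., a BOUND with range [2, 8] clamped by a valid BNDRNG of [4, 6]
   yields RANGE [4, 6].  */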
4587 static void
4588 get_size_range (tree bound, tree range[2], const offset_int bndrng[2])
4590 if (bound)
4591 get_size_range (bound, range);
4593 if (!bndrng || (bndrng[0] == 0 && bndrng[1] == HOST_WIDE_INT_M1U))
4594 return;
4596 if (range[0] && TREE_CODE (range[0]) == INTEGER_CST)
4598 offset_int r[] =
4599 { wi::to_offset (range[0]), wi::to_offset (range[1]) };
4600 if (r[0] < bndrng[0])
4601 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4602 if (bndrng[1] < r[1])
4603 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4605 else
4607 range[0] = wide_int_to_tree (sizetype, bndrng[0]);
4608 range[1] = wide_int_to_tree (sizetype, bndrng[1]);
4612 /* Try to verify that the sizes and lengths of the arguments to a string
4613 manipulation function given by EXP are within valid bounds and that
4614 the operation does not lead to buffer overflow or read past the end.
4615 Arguments other than EXP may be null. When non-null, the arguments
4616 have the following meaning:
4617 DST is the destination of a copy call or NULL otherwise.
4618 SRC is the source of a copy call or NULL otherwise.
4619 DSTWRITE is the number of bytes written into the destination obtained
4620 from the user-supplied size argument to the function (such as in
4621 memcpy(DST, SRC, DSTWRITE) or strncpy(DST, SRC, DSTWRITE)).
4622 MAXREAD is the user-supplied bound on the length of the source sequence
4623 (such as in strncat(d, s, N)). It specifies the upper limit on the number
4624 of bytes to write. If NULL, it's taken to be the same as DSTWRITE.
4625 SRCSTR is the source string (such as in strcpy(DST, SRC)) when the
4626 expression EXP is a string function call (as opposed to a memory call
4627 like memcpy). As an exception, SRCSTR can also be an integer denoting
4628 the precomputed size of the source string or object (for functions like
4629 memcpy).
4630 DSTSIZE is the size of the destination object.
4632 When DSTWRITE is null LEN is checked to verify that it doesn't exceed
4633 SIZE_MAX.
4635 WRITE is true for write accesses, READ is true for reads. Both are
4636 false for simple size checks in calls to functions that neither read
4637 from nor write to the region.
4639 When nonnull, PAD points to a more detailed description of the access.
4641 If the call is successfully verified as safe return true, otherwise
4642 return false. */
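/* The checks below proceed roughly in order: the write bound against
   PTRDIFF_MAX, the write bound against DSTSIZE, MAXREAD against the
   same limits, and finally reads against the length of SRCSTR.  */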
4644 bool
4645 check_access (tree exp, tree dstwrite,
4646 tree maxread, tree srcstr, tree dstsize,
4647 access_mode mode, const access_data *pad /* = NULL */)
4649 /* The size of the largest object is half the address space, or
4650 PTRDIFF_MAX. (This is way too permissive.) */
4651 tree maxobjsize = max_object_size ();
4653 /* Either the approximate/minimum length of the source string for
4654 string functions or the size of the source object for raw memory
4655 functions. */
4656 tree slen = NULL_TREE;
4658 /* The range of the access in bytes; first set to the write access
4659 for functions that write and then read for those that also (or
4660 just) read. */
4661 tree range[2] = { NULL_TREE, NULL_TREE };
4663 /* Set to true when the exact number of bytes written by a string
4664 function like strcpy is not known and the only thing that is
4665 known is that it must be at least one (for the terminating nul). */
4666 bool at_least_one = false;
4667 if (srcstr)
4669 /* SRCSTR is normally a pointer to a string but as a special case
4670 it can be an integer denoting the length of a string. */
4671 if (POINTER_TYPE_P (TREE_TYPE (srcstr)))
4673 if (!check_nul_terminated_array (exp, srcstr, maxread))
4674 return false;
4675 /* Try to determine the range of lengths the source string
4676 refers to. If it can be determined and is less than
4677 the upper bound given by MAXREAD add one to it for
4678 the terminating nul. Otherwise, set it to one for
4679 the same reason, or to MAXREAD as appropriate. */
4680 c_strlen_data lendata = { };
4681 get_range_strlen (srcstr, &lendata, /* eltsize = */ 1);
4682 range[0] = lendata.minlen;
4683 range[1] = lendata.maxbound ? lendata.maxbound : lendata.maxlen;
4684 if (range[0]
4685 && TREE_CODE (range[0]) == INTEGER_CST
4686 && TREE_CODE (range[1]) == INTEGER_CST
4687 && (!maxread || TREE_CODE (maxread) == INTEGER_CST))
4689 if (maxread && tree_int_cst_le (maxread, range[0]))
4690 range[0] = range[1] = maxread;
4691 else
4692 range[0] = fold_build2 (PLUS_EXPR, size_type_node,
4693 range[0], size_one_node);
4695 if (maxread && tree_int_cst_le (maxread, range[1]))
4696 range[1] = maxread;
4697 else if (!integer_all_onesp (range[1]))
4698 range[1] = fold_build2 (PLUS_EXPR, size_type_node,
4699 range[1], size_one_node);
4701 slen = range[0];
4703 else
4705 at_least_one = true;
4706 slen = size_one_node;
4709 else
4710 slen = srcstr;
4713 if (!dstwrite && !maxread)
4715 /* When the only available piece of data is the object size
4716 there is nothing to do. */
4717 if (!slen)
4718 return true;
4720 /* Otherwise, when the length of the source sequence is known
4721 (as with strlen), set DSTWRITE to it. */
4722 if (!range[0])
4723 dstwrite = slen;
4726 if (!dstsize)
4727 dstsize = maxobjsize;
4729 /* Set RANGE to that of DSTWRITE if non-null, bounded by PAD->DST.BNDRNG
4730 if valid. */
4731 get_size_range (dstwrite, range, pad ? pad->dst.bndrng : NULL);
4733 tree func = get_callee_fndecl (exp);
4734 /* Read vs write access by built-ins can be determined from the const
4735 qualifiers on the pointer argument. In the absence of attribute
4736 access, non-const qualified pointer arguments to user-defined
4737 functions are assumed to both read and write the objects. */
4738 const bool builtin = func ? fndecl_built_in_p (func) : false;
4740 /* First check the number of bytes to be written against the maximum
4741 object size. */
4742 if (range[0]
4743 && TREE_CODE (range[0]) == INTEGER_CST
4744 && tree_int_cst_lt (maxobjsize, range[0]))
4746 location_t loc = tree_inlined_location (exp);
4747 maybe_warn_for_bound (OPT_Wstringop_overflow_, loc, exp, func, range,
4748 NULL_TREE, pad);
4749 return false;
4752 /* The number of bytes to write is "exact" if DSTWRITE is non-null,
4753 constant, and in range of unsigned HOST_WIDE_INT. */
4754 bool exactwrite = dstwrite && tree_fits_uhwi_p (dstwrite);
4756 /* Next check the number of bytes to be written against the destination
4757 object size. */
4758 if (range[0] || !exactwrite || integer_all_onesp (dstwrite))
4760 if (range[0]
4761 && TREE_CODE (range[0]) == INTEGER_CST
4762 && ((tree_fits_uhwi_p (dstsize)
4763 && tree_int_cst_lt (dstsize, range[0]))
4764 || (dstwrite
4765 && tree_fits_uhwi_p (dstwrite)
4766 && tree_int_cst_lt (dstwrite, range[0]))))
4768 if (TREE_NO_WARNING (exp)
4769 || (pad && pad->dst.ref && TREE_NO_WARNING (pad->dst.ref)))
4770 return false;
4772 location_t loc = tree_inlined_location (exp);
4773 bool warned = false;
4774 if (dstwrite == slen && at_least_one)
4776 /* This is a call to strcpy with a destination of 0 size
4777 and a source of unknown length. The call will write
4778 at least one byte past the end of the destination. */
4779 warned = (func
4780 ? warning_at (loc, OPT_Wstringop_overflow_,
4781 "%K%qD writing %E or more bytes into "
4782 "a region of size %E overflows "
4783 "the destination",
4784 exp, func, range[0], dstsize)
4785 : warning_at (loc, OPT_Wstringop_overflow_,
4786 "%Kwriting %E or more bytes into "
4787 "a region of size %E overflows "
4788 "the destination",
4789 exp, range[0], dstsize));
4791 else
4793 const bool read
4794 = mode == access_read_only || mode == access_read_write;
4795 const bool write
4796 = mode == access_write_only || mode == access_read_write;
4797 const bool maybe = pad && pad->dst.parmarray;
4798 warned = warn_for_access (loc, func, exp,
4799 OPT_Wstringop_overflow_,
4800 range, dstsize,
4801 write, read && !builtin, maybe);
4804 if (warned)
4806 TREE_NO_WARNING (exp) = true;
4807 if (pad)
4808 pad->dst.inform_access (pad->mode);
4811 /* Return error when an overflow has been detected. */
4812 return false;
4816 /* Check the maximum length of the source sequence against the size
4817 of the destination object if known, or against the maximum size
4818 of an object. */
4819 if (maxread)
4821 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4822 PAD is nonnull and BNDRNG is valid. */
4823 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4825 location_t loc = tree_inlined_location (exp);
4826 tree size = dstsize;
4827 if (pad && pad->mode == access_read_only)
4828 size = wide_int_to_tree (sizetype, pad->src.sizrng[1]);
4830 if (range[0] && maxread && tree_fits_uhwi_p (size))
4832 if (tree_int_cst_lt (maxobjsize, range[0]))
4834 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
4835 range, size, pad);
4836 return false;
4839 if (size != maxobjsize && tree_int_cst_lt (size, range[0]))
4841 int opt = (dstwrite || mode != access_read_only
4842 ? OPT_Wstringop_overflow_
4843 : OPT_Wstringop_overread);
4844 maybe_warn_for_bound (opt, loc, exp, func, range, size, pad);
4845 return false;
4849 maybe_warn_nonstring_arg (func, exp);
4852 /* Check for reading past the end of SRC. */
4853 bool overread = (slen
4854 && slen == srcstr
4855 && dstwrite
4856 && range[0]
4857 && TREE_CODE (slen) == INTEGER_CST
4858 && tree_int_cst_lt (slen, range[0]));
4859 /* If none is determined, try to get a better answer based on the details
4860 in PAD. */
4861 if (!overread
4862 && pad
4863 && pad->src.sizrng[1] >= 0
4864 && pad->src.offrng[0] >= 0
4865 && (pad->src.offrng[1] < 0
4866 || pad->src.offrng[0] <= pad->src.offrng[1]))
4868 /* Set RANGE to that of MAXREAD, bounded by PAD->SRC.BNDRNG if
4869 PAD is nonnull and BNDRNG is valid. */
4870 get_size_range (maxread, range, pad ? pad->src.bndrng : NULL);
4871 /* Set OVERREAD for reads starting just past the end of an object. */
4872 overread = pad->src.sizrng[1] - pad->src.offrng[0] < pad->src.bndrng[0];
4873 range[0] = wide_int_to_tree (sizetype, pad->src.bndrng[0]);
4874 slen = size_zero_node;
4877 if (overread)
4879 if (TREE_NO_WARNING (exp)
4880 || (srcstr && TREE_NO_WARNING (srcstr))
4881 || (pad && pad->src.ref && TREE_NO_WARNING (pad->src.ref)))
4882 return false;
4884 location_t loc = tree_inlined_location (exp);
4885 const bool read
4886 = mode == access_read_only || mode == access_read_write;
4887 const bool maybe = pad && pad->dst.parmarray;
4888 if (warn_for_access (loc, func, exp, OPT_Wstringop_overread, range,
4889 slen, false, read, maybe))
4891 TREE_NO_WARNING (exp) = true;
4892 if (pad)
4893 pad->src.inform_access (access_read_only);
4895 return false;
4898 return true;
4901 /* A convenience wrapper for check_access above to check access
4902 by a read-only function like puts. */
4904 static bool
4905 check_read_access (tree exp, tree src, tree bound /* = NULL_TREE */,
4906 int ost /* = 1 */)
4908 if (!warn_stringop_overread)
4909 return true;
4911 access_data data (exp, access_read_only, NULL_TREE, false, bound, true);
4912 compute_objsize (src, ost, &data.src);
4913 return check_access (exp, /*dstwrite=*/ NULL_TREE, /*maxread=*/ bound,
4914 /*srcstr=*/ src, /*dstsize=*/ NULL_TREE, data.mode,
4915 &data);
4918 /* If STMT is a call to an allocation function, returns the constant
4919 maximum size of the object allocated by the call represented as
4920 sizetype. If nonnull, sets RNG1[] to the range of the size.
4921 When nonnull, uses RVALS for range information, otherwise calls
4922 get_range_info to get it.
4923 Returns null when STMT is not a call to a valid allocation function. */
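/* E.g., for p = malloc (n) with N in [32, 128] this returns 128 and
   sets RNG1 to that range; for calloc the product of the two size
   arguments' upper bounds is returned, capped at SIZE_MAX.  */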
4925 tree
4926 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
4927 range_query * /* = NULL */)
4929 if (!stmt || !is_gimple_call (stmt))
4930 return NULL_TREE;
4932 tree allocfntype;
4933 if (tree fndecl = gimple_call_fndecl (stmt))
4934 allocfntype = TREE_TYPE (fndecl);
4935 else
4936 allocfntype = gimple_call_fntype (stmt);
4938 if (!allocfntype)
4939 return NULL_TREE;
4941 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
4942 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
4943 if (!at)
4945 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
4946 return NULL_TREE;
4948 argidx1 = 0;
4951 unsigned nargs = gimple_call_num_args (stmt);
4953 if (argidx1 == UINT_MAX)
4955 tree atval = TREE_VALUE (at);
4956 if (!atval)
4957 return NULL_TREE;
4959 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4960 if (nargs <= argidx1)
4961 return NULL_TREE;
4963 atval = TREE_CHAIN (atval);
4964 if (atval)
4966 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
4967 if (nargs <= argidx2)
4968 return NULL_TREE;
4972 tree size = gimple_call_arg (stmt, argidx1);
4974 wide_int rng1_buf[2];
4975 /* If RNG1 is not set, use the buffer. */
4976 if (!rng1)
4977 rng1 = rng1_buf;
4979 /* Use maximum precision to avoid overflow below. */
4980 const int prec = ADDR_MAX_PRECISION;
4983 tree r[2];
4984 /* Determine the largest valid range size, including zero. */
4985 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
4986 return NULL_TREE;
4987 rng1[0] = wi::to_wide (r[0], prec);
4988 rng1[1] = wi::to_wide (r[1], prec);
4991 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
4992 return fold_convert (sizetype, size);
4994 /* To handle ranges do the math in wide_int and return the product
4995 of the upper bounds as a constant. Ignore anti-ranges. */
4996 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
4997 wide_int rng2[2];
4999 tree r[2];
5000 /* As above, use the full non-negative range on failure. */
5001 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
5002 return NULL_TREE;
5003 rng2[0] = wi::to_wide (r[0], prec);
5004 rng2[1] = wi::to_wide (r[1], prec);
5007 /* Compute products of both bounds for the caller but return the lesser
5008 of SIZE_MAX and the product of the upper bounds as a constant. */
5009 rng1[0] = rng1[0] * rng2[0];
5010 rng1[1] = rng1[1] * rng2[1];
5012 const tree size_max = TYPE_MAX_VALUE (sizetype);
5013 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
5015 rng1[1] = wi::to_wide (size_max, prec);
5016 return size_max;
5019 return wide_int_to_tree (sizetype, rng1[1]);
5022 /* For an access to an object referenced by the function parameter PTR
5023 of pointer type, set RNG[] to the range of sizes of the object
5024 obtained from the attribute access specification for the current function.
5025 Set STATIC_ARRAY if the array parameter has been declared [static].
5026 Return the function parameter on success and null otherwise. */
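/* E.g., for a parameter declared as int a[static 8] this sets RNG to
   [0, 8 * sizeof (int)], sets *STATIC_ARRAY if nonnull, and returns
   the PARM_DECL.  */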
5028 tree
5029 gimple_parm_array_size (tree ptr, wide_int rng[2],
5030 bool *static_array /* = NULL */)
5032 /* For a function argument try to determine the byte size of the array
5033 from the current function declaration (e.g., attribute access or
5034 related). */
5035 tree var = SSA_NAME_VAR (ptr);
5036 if (TREE_CODE (var) != PARM_DECL)
5037 return NULL_TREE;
5039 const unsigned prec = TYPE_PRECISION (sizetype);
5041 rdwr_map rdwr_idx;
5042 attr_access *access = get_parm_access (rdwr_idx, var);
5043 if (!access)
5044 return NULL_TREE;
5046 if (access->sizarg != UINT_MAX)
5048 /* TODO: Try to extract the range from the argument based on
5049 those of subsequent assertions or based on known calls to
5050 the current function. */
5051 return NULL_TREE;
5054 if (!access->minsize)
5055 return NULL_TREE;
5057 /* Only consider ordinary array bound at level 2 (or above if it's
5058 ever added). */
5059 if (warn_array_parameter < 2 && !access->static_p)
5060 return NULL_TREE;
5062 if (static_array)
5063 *static_array = access->static_p;
5065 rng[0] = wi::zero (prec);
5066 rng[1] = wi::uhwi (access->minsize, prec);
5067 /* Multiply the array bound encoded in the attribute by the size
5068 of what the pointer argument to which it decays points to. */
5069 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
5070 tree size = TYPE_SIZE_UNIT (eltype);
5071 if (!size || TREE_CODE (size) != INTEGER_CST)
5072 return NULL_TREE;
5074 rng[1] *= wi::to_wide (size, prec);
5075 return var;
5078 /* Wrapper around the wide_int overload of get_range that accepts
5079 offset_int instead. For middle end expressions returns the same
5080 result. For a subset of nonconstant expressions emitted by the front
5081 end determines a more precise range than would be possible otherwise. */
5083 static bool
5084 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
5086 offset_int add = 0;
5087 if (TREE_CODE (x) == PLUS_EXPR)
5089 /* Handle constant offsets in pointer addition expressions seen
5090 in the front end IL. */
5091 tree op = TREE_OPERAND (x, 1);
5092 if (TREE_CODE (op) == INTEGER_CST)
5094 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
5095 add = wi::to_offset (op);
5096 x = TREE_OPERAND (x, 0);
5100 if (TREE_CODE (x) == NOP_EXPR)
5101 /* Also handle conversions to sizetype seen in the front end IL. */
5102 x = TREE_OPERAND (x, 0);
5104 tree type = TREE_TYPE (x);
5105 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
5106 return false;
5108 if (TREE_CODE (x) != INTEGER_CST
5109 && TREE_CODE (x) != SSA_NAME)
5111 if (TYPE_UNSIGNED (type)
5112 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
5113 type = signed_type_for (type);
5115 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
5116 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
5117 return x;
5120 wide_int wr[2];
5121 if (!get_range (x, stmt, wr, rvals))
5122 return false;
5124 signop sgn = SIGNED;
5125 /* Only convert signed integers or unsigned sizetype to a signed
5126 offset and avoid converting large positive values in narrower
5127 types to negative offsets. */
5128 if (TYPE_UNSIGNED (type)
5129 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
5130 sgn = UNSIGNED;
5132 r[0] = offset_int::from (wr[0], sgn);
5133 r[1] = offset_int::from (wr[1], sgn);
5134 return true;
5137 /* Return the argument that the call STMT to a built-in function returns
5138 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
5139 from the argument reflected in the value returned by the built-in if it
5140 can be determined, otherwise to 0 and HWI_M1U respectively. */
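/* E.g., memcpy returns its first argument at offset zero, while
   mempcpy returns it offset by the number of copied bytes, so OFFRNG
   is derived from the size argument in the latter case.  */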
5142 static tree
5143 gimple_call_return_array (gimple *stmt, offset_int offrng[2],
5144 range_query *rvals)
5146 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL)
5147 || gimple_call_num_args (stmt) < 1)
5148 return NULL_TREE;
5150 tree fn = gimple_call_fndecl (stmt);
5151 switch (DECL_FUNCTION_CODE (fn))
5153 case BUILT_IN_MEMCPY:
5154 case BUILT_IN_MEMCPY_CHK:
5155 case BUILT_IN_MEMMOVE:
5156 case BUILT_IN_MEMMOVE_CHK:
5157 case BUILT_IN_MEMSET:
5158 case BUILT_IN_STPCPY:
5159 case BUILT_IN_STPCPY_CHK:
5160 case BUILT_IN_STPNCPY:
5161 case BUILT_IN_STPNCPY_CHK:
5162 case BUILT_IN_STRCAT:
5163 case BUILT_IN_STRCAT_CHK:
5164 case BUILT_IN_STRCPY:
5165 case BUILT_IN_STRCPY_CHK:
5166 case BUILT_IN_STRNCAT:
5167 case BUILT_IN_STRNCAT_CHK:
5168 case BUILT_IN_STRNCPY:
5169 case BUILT_IN_STRNCPY_CHK:
5170 offrng[0] = offrng[1] = 0;
5171 return gimple_call_arg (stmt, 0);
5173 case BUILT_IN_MEMPCPY:
5174 case BUILT_IN_MEMPCPY_CHK:
5176 tree off = gimple_call_arg (stmt, 2);
5177 if (!get_offset_range (off, stmt, offrng, rvals))
5179 offrng[0] = 0;
5180 offrng[1] = HOST_WIDE_INT_M1U;
5182 return gimple_call_arg (stmt, 0);
5185 case BUILT_IN_MEMCHR:
5187 tree off = gimple_call_arg (stmt, 2);
5188 if (get_offset_range (off, stmt, offrng, rvals))
5189 offrng[0] = 0;
5190 else
5192 offrng[0] = 0;
5193 offrng[1] = HOST_WIDE_INT_M1U;
5195 return gimple_call_arg (stmt, 0);
5198 case BUILT_IN_STRCHR:
5199 case BUILT_IN_STRRCHR:
5200 case BUILT_IN_STRSTR:
5202 offrng[0] = 0;
5203 offrng[1] = HOST_WIDE_INT_M1U;
5205 return gimple_call_arg (stmt, 0);
5207 default:
5208 break;
5211 return NULL_TREE;
5214 /* A helper of compute_objsize_r() to determine the size from an assignment
5215 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. */
5217 static bool
5218 handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
5219 ssa_name_limit_t &snlim, pointer_query *qry)
5221 tree_code code = gimple_assign_rhs_code (stmt);
5223 tree ptr = gimple_assign_rhs1 (stmt);
5225 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
5226 Determine the size/offset of each and use the one with more or less
5227 space remaining, respectively. If either fails, use the information
5228 determined from the other instead, adjusted up or down as appropriate
5229 for the expression. */
5230 access_ref aref[2] = { *pref, *pref };
5231 if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
5233 aref[0].base0 = false;
5234 aref[0].offrng[0] = aref[0].offrng[1] = 0;
5235 aref[0].add_max_offset ();
5236 aref[0].set_max_size_range ();
5239 ptr = gimple_assign_rhs2 (stmt);
5240 if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
5242 aref[1].base0 = false;
5243 aref[1].offrng[0] = aref[1].offrng[1] = 0;
5244 aref[1].add_max_offset ();
5245 aref[1].set_max_size_range ();
5248 if (!aref[0].ref && !aref[1].ref)
5249 /* Fail if the identity of neither argument could be determined. */
5250 return false;
5252 bool i0 = false;
5253 if (aref[0].ref && aref[0].base0)
5255 if (aref[1].ref && aref[1].base0)
5257 /* If the object referenced by both arguments has been determined
5258 set *PREF to the one with more or less space remaining, whichever
5259 is appropriate for CODE.
5260 TODO: Indicate when the objects are distinct so it can be
5261 diagnosed. */
5262 i0 = code == MAX_EXPR;
5263 const bool i1 = !i0;
5265 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
5266 *pref = aref[i1];
5267 else
5268 *pref = aref[i0];
5269 return true;
5272 /* If only the object referenced by one of the arguments could be
5273 determined, use it and... */
5274 *pref = aref[0];
5275 i0 = true;
5277 else
5278 *pref = aref[1];
5280 const bool i1 = !i0;
5281 /* ...see if the offset obtained from the other pointer can be used
5282 to tighten up the bound on the offset obtained from the first. */
5283 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
5284 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
5286 pref->offrng[0] = aref[i0].offrng[0];
5287 pref->offrng[1] = aref[i0].offrng[1];
5289 return true;
5292 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
5293 AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
5294 on success and false on failure. */
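/* E.g., for a[i] with int a[8] and I in the range [2, 5], the byte
   offset range added to *PREF below is [8, 20].  */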
5296 static bool
5297 handle_array_ref (tree aref, bool addr, int ostype, access_ref *pref,
5298 ssa_name_limit_t &snlim, pointer_query *qry)
5300 gcc_assert (TREE_CODE (aref) == ARRAY_REF);
5302 ++pref->deref;
5304 tree arefop = TREE_OPERAND (aref, 0);
5305 tree reftype = TREE_TYPE (arefop);
5306 if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
5307 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
5308 of known bound. */
5309 return false;
5311 if (!compute_objsize_r (arefop, ostype, pref, snlim, qry))
5312 return false;
5314 offset_int orng[2];
5315 tree off = pref->eval (TREE_OPERAND (aref, 1));
5316 range_query *const rvals = qry ? qry->rvals : NULL;
5317 if (!get_offset_range (off, NULL, orng, rvals))
5319 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
5320 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5321 orng[0] = -orng[1] - 1;
5324 /* Convert the array index range determined above to a byte
5325 offset. */
5326 tree lowbnd = array_ref_low_bound (aref);
5327 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
5329 /* Adjust the index by the low bound of the array domain
5330 (normally zero but 1 in Fortran). */
5331 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
5332 orng[0] -= lb;
5333 orng[1] -= lb;
5336 tree eltype = TREE_TYPE (aref);
5337 tree tpsize = TYPE_SIZE_UNIT (eltype);
5338 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
5340 pref->add_max_offset ();
5341 return true;
5344 offset_int sz = wi::to_offset (tpsize);
5345 orng[0] *= sz;
5346 orng[1] *= sz;
5348 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
5350 /* Except for the permissive raw memory functions which use
5351 the size of the whole object determined above, use the size
5352 of the referenced array. Because the overall offset is from
5353 the beginning of the complete array object, add this overall
5354 offset to the size of the array. */
5355 offset_int sizrng[2] =
5357 pref->offrng[0] + orng[0] + sz,
5358 pref->offrng[1] + orng[1] + sz
5360 if (sizrng[1] < sizrng[0])
5361 std::swap (sizrng[0], sizrng[1]);
5362 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
5363 pref->sizrng[0] = sizrng[0];
5364 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
5365 pref->sizrng[1] = sizrng[1];
5368 pref->add_offset (orng[0], orng[1]);
5369 return true;
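/* An illustrative sketch (editorial, with hypothetical values) of the
   index-to-byte scaling above:

     int a[10];                 // element size sz == 4 bytes
     ... &a[i] ...              // index i known to be in [2, 5]

   The index range [2, 5] multiplied by sz gives the byte-offset range
   [8, 20] that add_offset records in *PREF.  */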
5372 /* A helper of compute_objsize_r() to determine the size from MEM_REF
5373 MREF. Return true on success and false on failure. */
5375 static bool
5376 handle_mem_ref (tree mref, int ostype, access_ref *pref,
5377 ssa_name_limit_t &snlim, pointer_query *qry)
5379 gcc_assert (TREE_CODE (mref) == MEM_REF);
5381 ++pref->deref;
5383 if (VECTOR_TYPE_P (TREE_TYPE (mref)))
5385 /* Hack: Give up for MEM_REFs of vector types; those may be
5386 synthesized from multiple assignments to consecutive data
5387 members (see PR 93200 and 96963).
5388 FIXME: Vectorized assignments should only be present after
5389 vectorization so this hack is only necessary after it has
5390 run and could be avoided in calls from prior passes (e.g.,
5391 tree-ssa-strlen.c).
5392 FIXME: Deal with this more generally, e.g., by marking up
5393 such MEM_REFs at the time they're created. */
5394 return false;
5397 tree mrefop = TREE_OPERAND (mref, 0);
5398 if (!compute_objsize_r (mrefop, ostype, pref, snlim, qry))
5399 return false;
5401 offset_int orng[2];
5402 tree off = pref->eval (TREE_OPERAND (mref, 1));
5403 range_query *const rvals = qry ? qry->rvals : NULL;
5404 if (!get_offset_range (off, NULL, orng, rvals))
5406 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
5407 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5408 orng[0] = -orng[1] - 1;
5411 pref->add_offset (orng[0], orng[1]);
5412 return true;
5415 /* Helper to compute the size of the object referenced by the PTR
5416 expression which must have pointer type, using Object Size type
5417 OSTYPE (only the least significant 2 bits are used).
5418 On success, sets PREF->REF to the DECL of the referenced object
5419 if it's unique, otherwise to null, PREF->OFFRNG to the range of
5420 offsets into it, and PREF->SIZRNG to the range of sizes of
5421 the object(s).
5422 SNLIM is used to avoid visiting the same PHI operand multiple
5423 times, and, when nonnull, RVALS to determine range information.
5424 Returns true on success, false when a meaningful size (or range)
5425 cannot be determined.
5427 The function is intended for diagnostics and should not be used
5428 to influence code generation or optimization. */
5430 static bool
5431 compute_objsize_r (tree ptr, int ostype, access_ref *pref,
5432 ssa_name_limit_t &snlim, pointer_query *qry)
5434 STRIP_NOPS (ptr);
5436 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
5437 if (addr)
5439 --pref->deref;
5440 ptr = TREE_OPERAND (ptr, 0);
5443 if (DECL_P (ptr))
5445 pref->ref = ptr;
5447 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
5449 /* Set the maximum size if the reference is to the pointer
5450 itself (as opposed to what it points to). */
5451 pref->set_max_size_range ();
5452 return true;
5455 if (tree size = decl_init_size (ptr, false))
5456 if (TREE_CODE (size) == INTEGER_CST)
5458 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5459 return true;
5462 pref->set_max_size_range ();
5463 return true;
5466 const tree_code code = TREE_CODE (ptr);
5467 range_query *const rvals = qry ? qry->rvals : NULL;
5469 if (code == BIT_FIELD_REF)
5471 tree ref = TREE_OPERAND (ptr, 0);
5472 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5473 return false;
5475 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
5476 pref->add_offset (off / BITS_PER_UNIT);
5477 return true;
5480 if (code == COMPONENT_REF)
5482 tree ref = TREE_OPERAND (ptr, 0);
5483 if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
5484 /* In accesses through union types consider the entire unions
5485 rather than just their members. */
5486 ostype = 0;
5487 tree field = TREE_OPERAND (ptr, 1);
5489 if (ostype == 0)
5491 /* In OSTYPE zero (for raw memory functions like memcpy), use
5492 the maximum size instead if the identity of the enclosing
5493 object cannot be determined. */
5494 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5495 return false;
5497 /* Otherwise, use the size of the enclosing object and add
5498 the offset of the member to the offset computed so far. */
5499 tree offset = byte_position (field);
5500 if (TREE_CODE (offset) == INTEGER_CST)
5501 pref->add_offset (wi::to_offset (offset));
5502 else
5503 pref->add_max_offset ();
5505 if (!pref->ref)
5506 /* REF may have been already set to an SSA_NAME earlier
5507 to provide better context for diagnostics. In that case,
5508 leave it unchanged. */
5509 pref->ref = ref;
5510 return true;
5513 pref->ref = field;
5515 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
5517 /* Set maximum size if the reference is to the pointer member
5518 itself (as opposed to what it points to). */
5519 pref->set_max_size_range ();
5520 return true;
5523 /* SAM is set for array members that might need special treatment. */
5524 special_array_member sam;
5525 tree size = component_ref_size (ptr, &sam);
5526 if (sam == special_array_member::int_0)
5527 pref->sizrng[0] = pref->sizrng[1] = 0;
5528 else if (!pref->trail1special && sam == special_array_member::trail_1)
5529 pref->sizrng[0] = pref->sizrng[1] = 1;
5530 else if (size && TREE_CODE (size) == INTEGER_CST)
5531 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
5532 else
5534 /* When the size of the member is unknown it's either a flexible
5535 array member or a trailing special array member (either zero
5536 length or one-element). Set the size to the maximum minus
5537 the constant size of the type. */
5538 pref->sizrng[0] = 0;
5539 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
5540 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
5541 if (TREE_CODE (recsize) == INTEGER_CST)
5542 pref->sizrng[1] -= wi::to_offset (recsize);
5544 return true;
5547 if (code == ARRAY_REF)
5548 return handle_array_ref (ptr, addr, ostype, pref, snlim, qry);
5550 if (code == MEM_REF)
5551 return handle_mem_ref (ptr, ostype, pref, snlim, qry);
5553 if (code == TARGET_MEM_REF)
5555 tree ref = TREE_OPERAND (ptr, 0);
5556 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5557 return false;
5559 /* TODO: Handle remaining operands. Until then, add maximum offset. */
5560 pref->ref = ptr;
5561 pref->add_max_offset ();
5562 return true;
5565 if (code == INTEGER_CST)
5567 /* Pointer constants other than null are most likely the result
5568 of erroneous null pointer addition/subtraction. Set size to
5569 zero. For null pointers, set size to the maximum for now
5570 since those may be the result of jump threading. */
5571 if (integer_zerop (ptr))
5572 pref->set_max_size_range ();
5573 else
5574 pref->sizrng[0] = pref->sizrng[1] = 0;
5575 pref->ref = ptr;
5577 return true;
5580 if (code == STRING_CST)
5582 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
5583 pref->ref = ptr;
5584 return true;
5587 if (code == POINTER_PLUS_EXPR)
5589 tree ref = TREE_OPERAND (ptr, 0);
5590 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
5591 return false;
5593 /* Clear DEREF since the offset is being applied to the target
5594 of the dereference. */
5595 pref->deref = 0;
5597 offset_int orng[2];
5598 tree off = pref->eval (TREE_OPERAND (ptr, 1));
5599 if (get_offset_range (off, NULL, orng, rvals))
5600 pref->add_offset (orng[0], orng[1]);
5601 else
5602 pref->add_max_offset ();
5603 return true;
5606 if (code == VIEW_CONVERT_EXPR)
5608 ptr = TREE_OPERAND (ptr, 0);
5609 return compute_objsize_r (ptr, ostype, pref, snlim, qry);
5612 if (code == SSA_NAME)
5614 if (!snlim.next ())
5615 return false;
5617 /* Only process an SSA_NAME if the recursion limit has not yet
5618 been reached. */
5619 if (qry)
5621 if (++qry->depth > qry->max_depth)
5622 qry->max_depth = qry->depth;
5623 if (const access_ref *cache_ref = qry->get_ref (ptr))
5625 /* If the pointer is in the cache set *PREF to what it refers
5626 to and return success. */
5627 *pref = *cache_ref;
5628 return true;
5632 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
5633 if (is_gimple_call (stmt))
5635 /* If STMT is a call to an allocation function get the size
5636 from its argument(s). If successful, also set *PREF->REF
5637 to PTR for the caller to include in diagnostics. */
5638 wide_int wr[2];
5639 if (gimple_call_alloc_size (stmt, wr, rvals))
5641 pref->ref = ptr;
5642 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5643 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5644 /* Constrain both bounds to a valid size. */
5645 offset_int maxsize = wi::to_offset (max_object_size ());
5646 if (pref->sizrng[0] > maxsize)
5647 pref->sizrng[0] = maxsize;
5648 if (pref->sizrng[1] > maxsize)
5649 pref->sizrng[1] = maxsize;
5651 else
5653 /* For functions known to return one of their pointer arguments
5654 try to determine what the returned pointer points to, and on
5655 success add OFFRNG which was set to the offset added by
5656 the function (e.g., memchr) to the overall offset. */
5657 offset_int offrng[2];
5658 if (tree ret = gimple_call_return_array (stmt, offrng, rvals))
5660 if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
5661 return false;
5663 /* Cap OFFRNG[1] to at most the remaining size of
5664 the object. */
5665 offset_int remrng[2];
5666 remrng[1] = pref->size_remaining (remrng);
5667 if (remrng[1] < offrng[1])
5668 offrng[1] = remrng[1];
5669 pref->add_offset (offrng[0], offrng[1]);
5671 else
5673 /* For other calls that might return arbitrary pointers
5674 including into the middle of objects set the size
5675 range to maximum, clear PREF->BASE0, and also set
5676 PREF->REF to include in diagnostics. */
5677 pref->set_max_size_range ();
5678 pref->base0 = false;
5679 pref->ref = ptr;
5682 qry->put_ref (ptr, *pref);
5683 return true;
5686 if (gimple_nop_p (stmt))
5688 /* For a function argument try to determine the byte size
5689 of the array from the current function declaration
5690 (e.g., attribute access or related). */
5691 wide_int wr[2];
5692 bool static_array = false;
5693 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
5695 pref->parmarray = !static_array;
5696 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
5697 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
5698 pref->ref = ref;
5699 qry->put_ref (ptr, *pref);
5700 return true;
5703 pref->set_max_size_range ();
5704 pref->base0 = false;
5705 pref->ref = ptr;
5706 qry->put_ref (ptr, *pref);
5707 return true;
5710 if (gimple_code (stmt) == GIMPLE_PHI)
5712 pref->ref = ptr;
5713 access_ref phi_ref = *pref;
5714 if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
5715 return false;
5716 *pref = phi_ref;
5717 pref->ref = ptr;
5718 qry->put_ref (ptr, *pref);
5719 return true;
5722 if (!is_gimple_assign (stmt))
5724 /* Clear BASE0 since the assigned pointer might point into
5725 the middle of the object, set the maximum size range and,
5726 if the SSA_NAME refers to a function argument, set
5727 PREF->REF to it. */
5728 pref->base0 = false;
5729 pref->set_max_size_range ();
5730 pref->ref = ptr;
5731 return true;
5734 tree_code code = gimple_assign_rhs_code (stmt);
5736 if (code == MAX_EXPR || code == MIN_EXPR)
5738 if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
5739 return false;
5740 qry->put_ref (ptr, *pref);
5741 return true;
5744 tree rhs = gimple_assign_rhs1 (stmt);
5746 if (code == POINTER_PLUS_EXPR
5747 && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
5749 /* Compute the size of the object first. */
5750 if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
5751 return false;
5753 offset_int orng[2];
5754 tree off = gimple_assign_rhs2 (stmt);
5755 if (get_offset_range (off, stmt, orng, rvals))
5756 pref->add_offset (orng[0], orng[1]);
5757 else
5758 pref->add_max_offset ();
5759 qry->put_ref (ptr, *pref);
5760 return true;
5763 if (code == ADDR_EXPR
5764 || code == SSA_NAME)
5765 return compute_objsize_r (rhs, ostype, pref, snlim, qry);
5767 /* (This could also be an assignment from a nonlocal pointer.) Save
5768 PTR to mention in diagnostics but otherwise treat it as a pointer
5769 to an unknown object. */
5770 pref->ref = rhs;
5771 pref->base0 = false;
5772 pref->set_max_size_range ();
5773 return true;
5776 /* Assume all other expressions point into an unknown object
5777 of the maximum valid size. */
5778 pref->ref = ptr;
5779 pref->base0 = false;
5780 pref->set_max_size_range ();
5781 if (TREE_CODE (ptr) == SSA_NAME)
5782 qry->put_ref (ptr, *pref);
5783 return true;
5786 /* A "public" wrapper around the above. Clients should use this overload
5787 instead. */
5789 tree
5790 compute_objsize (tree ptr, int ostype, access_ref *pref,
5791 range_query *rvals /* = NULL */)
5793 pointer_query qry;
5794 qry.rvals = rvals;
5795 ssa_name_limit_t snlim;
5796 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
5797 return NULL_TREE;
5799 offset_int maxsize = pref->size_remaining ();
5800 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5801 pref->offrng[0] = 0;
5802 return wide_int_to_tree (sizetype, maxsize);
5805 /* Transitional wrapper. The function should be removed once callers
5806 transition to the pointer_query API. */
5808 tree
5809 compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
5811 pointer_query qry;
5812 if (ptr_qry)
5813 ptr_qry->depth = 0;
5814 else
5815 ptr_qry = &qry;
5817 ssa_name_limit_t snlim;
5818 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
5819 return NULL_TREE;
5821 offset_int maxsize = pref->size_remaining ();
5822 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
5823 pref->offrng[0] = 0;
5824 return wide_int_to_tree (sizetype, maxsize);
5827 /* Legacy wrapper around the above. The function should be removed
5828 once callers transition to one of the two above. */
5830 tree
5831 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
5832 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
5834 /* Set the initial offsets to zero and size to negative to indicate
5835 none has been computed yet. */
5836 access_ref ref;
5837 tree size = compute_objsize (ptr, ostype, &ref, rvals);
5838 if (!size || !ref.base0)
5839 return NULL_TREE;
5841 if (pdecl)
5842 *pdecl = ref.ref;
5844 if (poff)
5845 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
5847 return size;
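/* An illustrative sketch (editorial, hypothetical caller) of how the
   wrappers above are used by diagnostics:

     access_ref aref;
     tree size = compute_objsize (ptr, 1, &aref);
     if (size)
       warn_if_too_small (size, &aref);   // aref.ref names the object,
                                          // aref.offrng the offset range

   A null SIZE means no meaningful size could be determined;
   warn_if_too_small stands for any hypothetical consumer.  */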
5850 /* Helper to determine and check the sizes of the source and the destination
5851 of calls to __builtin_{bzero,memcpy,mempcpy,memset}. EXP is the
5852 call expression, DEST is the destination argument, SRC is the source
5853 argument or null, and LEN is the number of bytes. Use Object Size type-0
5854 regardless of the OPT_Wstringop_overflow_ setting. Return true on success
5855 (no overflow or invalid sizes), false otherwise. */
5857 static bool
5858 check_memop_access (tree exp, tree dest, tree src, tree size)
5860 /* For functions like memset and memcpy that operate on raw memory
5861 try to determine the size of the largest source and destination
5862 object using type-0 Object Size regardless of the object size
5863 type specified by the option. */
5864 access_data data (exp, access_read_write);
5865 tree srcsize = src ? compute_objsize (src, 0, &data.src) : NULL_TREE;
5866 tree dstsize = compute_objsize (dest, 0, &data.dst);
5868 return check_access (exp, size, /*maxread=*/NULL_TREE,
5869 srcsize, dstsize, data.mode, &data);
5872 /* Validate memchr arguments without performing any expansion.
5873 Return NULL_RTX. */
5875 static rtx
5876 expand_builtin_memchr (tree exp, rtx)
5878 if (!validate_arglist (exp,
5879 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
5880 return NULL_RTX;
5882 tree arg1 = CALL_EXPR_ARG (exp, 0);
5883 tree len = CALL_EXPR_ARG (exp, 2);
5885 check_read_access (exp, arg1, len, 0);
5887 return NULL_RTX;
5890 /* Expand a call EXP to the memcpy builtin.
5891 Return NULL_RTX if we failed; the caller should emit a normal call,
5892 otherwise try to get the result in TARGET, if convenient (and in
5893 mode MODE if that's convenient). */
5895 static rtx
5896 expand_builtin_memcpy (tree exp, rtx target)
5898 if (!validate_arglist (exp,
5899 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5900 return NULL_RTX;
5902 tree dest = CALL_EXPR_ARG (exp, 0);
5903 tree src = CALL_EXPR_ARG (exp, 1);
5904 tree len = CALL_EXPR_ARG (exp, 2);
5906 check_memop_access (exp, dest, src, len);
5908 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5909 /*retmode=*/ RETURN_BEGIN, false);
5912 /* Check a call EXP to the memmove built-in for validity.
5913 Return NULL_RTX on both success and failure. */
5915 static rtx
5916 expand_builtin_memmove (tree exp, rtx target)
5918 if (!validate_arglist (exp,
5919 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5920 return NULL_RTX;
5922 tree dest = CALL_EXPR_ARG (exp, 0);
5923 tree src = CALL_EXPR_ARG (exp, 1);
5924 tree len = CALL_EXPR_ARG (exp, 2);
5926 check_memop_access (exp, dest, src, len);
5928 return expand_builtin_memory_copy_args (dest, src, len, target, exp,
5929 /*retmode=*/ RETURN_BEGIN, true);
5932 /* Expand a call EXP to the mempcpy builtin.
5933 Return NULL_RTX if we failed; the caller should emit a normal call,
5934 otherwise try to get the result in TARGET, if convenient (and in
5935 mode MODE if that's convenient). */
5937 static rtx
5938 expand_builtin_mempcpy (tree exp, rtx target)
5940 if (!validate_arglist (exp,
5941 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
5942 return NULL_RTX;
5944 tree dest = CALL_EXPR_ARG (exp, 0);
5945 tree src = CALL_EXPR_ARG (exp, 1);
5946 tree len = CALL_EXPR_ARG (exp, 2);
5948 /* Policy does not generally allow using compute_objsize (which
5949 is used internally by check_memop_access) to change code generation
5950 or drive optimization decisions.
5952 In this instance it is safe because the code we generate has
5953 the same semantics regardless of the return value of
5954 check_memop_access. Exactly the same amount of data is copied
5955 and the return value is exactly the same in both cases.
5957 Furthermore, check_memop_access always uses mode 0 for the call to
5958 compute_objsize, so the imprecise nature of compute_objsize is
5959 avoided. */
5961 /* Avoid expanding mempcpy into memcpy when the call is determined
5962 to overflow the buffer. This also prevents the same overflow
5963 from being diagnosed again when expanding memcpy. */
5964 if (!check_memop_access (exp, dest, src, len))
5965 return NULL_RTX;
5967 return expand_builtin_mempcpy_args (dest, src, len,
5968 target, exp, /*retmode=*/ RETURN_END);
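/* An illustrative note (editorial sketch, not GCC source): the retmode
   values correspond to the C-level return conventions of the functions
   being expanded, e.g.:

     void *p = memcpy (d, s, n);    // p == d              -> RETURN_BEGIN
     void *q = mempcpy (d, s, n);   // q == d + n          -> RETURN_END
     char *r = stpcpy (d, s);       // r == d + strlen (s),
                                    // one before the end of the copy
                                    //                -> RETURN_END_MINUS_ONE
*/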
5971 /* Helper function to do the actual work for expand of memory copy family
5972 functions (memcpy, mempcpy, stpcpy). The expansion should assign LEN bytes
5973 of memory from SRC to DEST and assign to TARGET if convenient. Return
5974 value is based on RETMODE argument. */
5976 static rtx
5977 expand_builtin_memory_copy_args (tree dest, tree src, tree len,
5978 rtx target, tree exp, memop_ret retmode,
5979 bool might_overlap)
5981 unsigned int src_align = get_pointer_alignment (src);
5982 unsigned int dest_align = get_pointer_alignment (dest);
5983 rtx dest_mem, src_mem, dest_addr, len_rtx;
5984 HOST_WIDE_INT expected_size = -1;
5985 unsigned int expected_align = 0;
5986 unsigned HOST_WIDE_INT min_size;
5987 unsigned HOST_WIDE_INT max_size;
5988 unsigned HOST_WIDE_INT probable_max_size;
5990 bool is_move_done;
5992 /* If DEST is not a pointer type, call the normal function. */
5993 if (dest_align == 0)
5994 return NULL_RTX;
5996 /* If SRC is not a pointer type, don't do this
5997 operation in-line. */
5998 if (src_align == 0)
5999 return NULL_RTX;
6001 if (currently_expanding_gimple_stmt)
6002 stringop_block_profile (currently_expanding_gimple_stmt,
6003 &expected_align, &expected_size);
6005 if (expected_align < dest_align)
6006 expected_align = dest_align;
6007 dest_mem = get_memory_rtx (dest, len);
6008 set_mem_align (dest_mem, dest_align);
6009 len_rtx = expand_normal (len);
6010 determine_block_size (len, len_rtx, &min_size, &max_size,
6011 &probable_max_size);
6013 /* Try to get the byte representation of the constant SRC points to,
6014 with its byte size in NBYTES. */
6015 unsigned HOST_WIDE_INT nbytes;
6016 const char *rep = getbyterep (src, &nbytes);
6018 /* If the function's constant bound LEN_RTX is less than or equal
6019 to the byte size of the representation of the constant argument,
6020 and if block move would be done by pieces, we can avoid loading
6021 the bytes from memory and only store the computed constant.
6022 This works in the overlap (memmove) case as well because
6023 store_by_pieces just generates a series of stores of constants
6024 from the representation returned by getbyterep(). */
6025 if (rep
6026 && CONST_INT_P (len_rtx)
6027 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes
6028 && can_store_by_pieces (INTVAL (len_rtx), builtin_memcpy_read_str,
6029 CONST_CAST (char *, rep),
6030 dest_align, false))
6032 dest_mem = store_by_pieces (dest_mem, INTVAL (len_rtx),
6033 builtin_memcpy_read_str,
6034 CONST_CAST (char *, rep),
6035 dest_align, false, retmode);
6036 dest_mem = force_operand (XEXP (dest_mem, 0), target);
6037 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6038 return dest_mem;
6041 src_mem = get_memory_rtx (src, len);
6042 set_mem_align (src_mem, src_align);
6044 /* Copy word part most expediently. */
6045 enum block_op_methods method = BLOCK_OP_NORMAL;
6046 if (CALL_EXPR_TAILCALL (exp)
6047 && (retmode == RETURN_BEGIN || target == const0_rtx))
6048 method = BLOCK_OP_TAILCALL;
6049 bool use_mempcpy_call = (targetm.libc_has_fast_function (BUILT_IN_MEMPCPY)
6050 && retmode == RETURN_END
6051 && !might_overlap
6052 && target != const0_rtx);
6053 if (use_mempcpy_call)
6054 method = BLOCK_OP_NO_LIBCALL_RET;
6055 dest_addr = emit_block_move_hints (dest_mem, src_mem, len_rtx, method,
6056 expected_align, expected_size,
6057 min_size, max_size, probable_max_size,
6058 use_mempcpy_call, &is_move_done,
6059 might_overlap);
6061 /* Bail out when a mempcpy call would be expanded as a libcall and when
6062 we have a target that provides a fast implementation
6063 of the mempcpy routine. */
6064 if (!is_move_done)
6065 return NULL_RTX;
6067 if (dest_addr == pc_rtx)
6068 return NULL_RTX;
6070 if (dest_addr == 0)
6072 dest_addr = force_operand (XEXP (dest_mem, 0), target);
6073 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6076 if (retmode != RETURN_BEGIN && target != const0_rtx)
6078 dest_addr = gen_rtx_PLUS (ptr_mode, dest_addr, len_rtx);
6079 /* stpcpy returns a pointer to the last byte. */
6080 if (retmode == RETURN_END_MINUS_ONE)
6081 dest_addr = gen_rtx_MINUS (ptr_mode, dest_addr, const1_rtx);
6084 return dest_addr;
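/* An illustrative sketch (editorial, hypothetical values) of the
   constant-source shortcut above:

     char buf[8];
     memcpy (buf, "abc", 4);

   getbyterep returns the 4 bytes of "abc" (including the NUL), the
   constant length 4 is <= NBYTES, and if the target can store 4 bytes
   by pieces, the expansion emits immediate stores of those bytes
   instead of loading them from the string constant first.  */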
6087 static rtx
6088 expand_builtin_mempcpy_args (tree dest, tree src, tree len,
6089 rtx target, tree orig_exp, memop_ret retmode)
6091 return expand_builtin_memory_copy_args (dest, src, len, target, orig_exp,
6092 retmode, false);
6095 /* Expand into a movstr instruction, if one is available. Return NULL_RTX if
6096 we failed; the caller should emit a normal call, otherwise try to
6097 get the result in TARGET, if convenient.
6098 Return value is based on RETMODE argument. */
6100 static rtx
6101 expand_movstr (tree dest, tree src, rtx target, memop_ret retmode)
6103 class expand_operand ops[3];
6104 rtx dest_mem;
6105 rtx src_mem;
6107 if (!targetm.have_movstr ())
6108 return NULL_RTX;
6110 dest_mem = get_memory_rtx (dest, NULL);
6111 src_mem = get_memory_rtx (src, NULL);
6112 if (retmode == RETURN_BEGIN)
6114 target = force_reg (Pmode, XEXP (dest_mem, 0));
6115 dest_mem = replace_equiv_address (dest_mem, target);
6118 create_output_operand (&ops[0],
6119 retmode != RETURN_BEGIN ? target : NULL_RTX, Pmode);
6120 create_fixed_operand (&ops[1], dest_mem);
6121 create_fixed_operand (&ops[2], src_mem);
6122 if (!maybe_expand_insn (targetm.code_for_movstr, 3, ops))
6123 return NULL_RTX;
6125 if (retmode != RETURN_BEGIN && target != const0_rtx)
6127 target = ops[0].value;
6128 /* movstr is supposed to set end to the address of the NUL
6129 terminator. If the caller requested a mempcpy-like return value,
6130 adjust it. */
6131 if (retmode == RETURN_END)
6133 rtx tem = plus_constant (GET_MODE (target),
6134 gen_lowpart (GET_MODE (target), target), 1);
6135 emit_move_insn (target, force_operand (tem, NULL_RTX));
6138 return target;
6141 /* Do some very basic size validation of a call to the strcat builtin
6142 given by EXP. Return NULL_RTX to have the built-in expand to a call
6143 to the library function. */
6145 static rtx
6146 expand_builtin_strcat (tree exp)
6148 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE)
6149 || !warn_stringop_overflow)
6150 return NULL_RTX;
6152 tree dest = CALL_EXPR_ARG (exp, 0);
6153 tree src = CALL_EXPR_ARG (exp, 1);
6155 /* There is no way here to determine the length of the string in
6156 the destination to which the SRC string is being appended so
6157 just diagnose cases when the source string is longer than
6158 the destination object. */
6159 access_data data (exp, access_read_write, NULL_TREE, true,
6160 NULL_TREE, true);
6161 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6162 compute_objsize (src, ost, &data.src);
6163 tree destsize = compute_objsize (dest, ost, &data.dst);
6165 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6166 src, destsize, data.mode, &data);
6168 return NULL_RTX;
6171 /* Expand expression EXP, which is a call to the strcpy builtin. Return
6172 NULL_RTX if we failed the caller should emit a normal call, otherwise
6173 try to get the result in TARGET, if convenient (and in mode MODE if that's
6174 convenient). */
6176 static rtx
6177 expand_builtin_strcpy (tree exp, rtx target)
6179 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6180 return NULL_RTX;
6182 tree dest = CALL_EXPR_ARG (exp, 0);
6183 tree src = CALL_EXPR_ARG (exp, 1);
6185 if (warn_stringop_overflow)
6187 access_data data (exp, access_read_write, NULL_TREE, true,
6188 NULL_TREE, true);
6189 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6190 compute_objsize (src, ost, &data.src);
6191 tree dstsize = compute_objsize (dest, ost, &data.dst);
6192 check_access (exp, /*dstwrite=*/ NULL_TREE,
6193 /*maxread=*/ NULL_TREE, /*srcstr=*/ src,
6194 dstsize, data.mode, &data);
6197 if (rtx ret = expand_builtin_strcpy_args (exp, dest, src, target))
6199 /* Check to see if the argument was declared attribute nonstring
6200 and if so, issue a warning since at this point it's not known
6201 to be nul-terminated. */
6202 tree fndecl = get_callee_fndecl (exp);
6203 maybe_warn_nonstring_arg (fndecl, exp);
6204 return ret;
6207 return NULL_RTX;
6210 /* Helper function to do the actual work for expand_builtin_strcpy. The
6211 arguments to the builtin_strcpy call DEST and SRC are broken out
6212 so that this can also be called without constructing an actual CALL_EXPR.
6213 The other arguments and return value are the same as for
6214 expand_builtin_strcpy. */
6216 static rtx
6217 expand_builtin_strcpy_args (tree exp, tree dest, tree src, rtx target)
6219 /* Detect strcpy calls with unterminated arrays. */
6220 tree size;
6221 bool exact;
6222 if (tree nonstr = unterminated_array (src, &size, &exact))
6224 /* NONSTR refers to the non-nul terminated constant array. */
6225 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, nonstr,
6226 size, exact);
6227 return NULL_RTX;
6230 return expand_movstr (dest, src, target, /*retmode=*/ RETURN_BEGIN);
6233 /* Expand a call EXP to the stpcpy builtin.
6234 Return NULL_RTX if we failed; the caller should emit a normal call,
6235 otherwise try to get the result in TARGET, if convenient (and in
6236 mode MODE if that's convenient). */
6238 static rtx
6239 expand_builtin_stpcpy_1 (tree exp, rtx target, machine_mode mode)
6241 tree dst, src;
6242 location_t loc = EXPR_LOCATION (exp);
6244 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
6245 return NULL_RTX;
6247 dst = CALL_EXPR_ARG (exp, 0);
6248 src = CALL_EXPR_ARG (exp, 1);
6250 if (warn_stringop_overflow)
6252 access_data data (exp, access_read_write);
6253 tree destsize = compute_objsize (dst, warn_stringop_overflow - 1,
6254 &data.dst);
6255 check_access (exp, /*dstwrite=*/NULL_TREE, /*maxread=*/NULL_TREE,
6256 src, destsize, data.mode, &data);
6259 /* If return value is ignored, transform stpcpy into strcpy. */
6260 if (target == const0_rtx && builtin_decl_implicit (BUILT_IN_STRCPY))
6262 tree fn = builtin_decl_implicit (BUILT_IN_STRCPY);
6263 tree result = build_call_nofold_loc (loc, fn, 2, dst, src);
6264 return expand_expr (result, target, mode, EXPAND_NORMAL);
6266 else
6268 tree len, lenp1;
6269 rtx ret;
6271 /* Ensure we get an actual string whose length can be evaluated at
6272 compile-time, not an expression containing a string. This is
6273 because the latter will potentially produce pessimized code
6274 when used to produce the return value. */
6275 c_strlen_data lendata = { };
6276 if (!c_getstr (src)
6277 || !(len = c_strlen (src, 0, &lendata, 1)))
6278 return expand_movstr (dst, src, target,
6279 /*retmode=*/ RETURN_END_MINUS_ONE);
6281 if (lendata.decl)
6282 warn_string_no_nul (EXPR_LOCATION (exp), exp, NULL, src, lendata.decl);
6284 lenp1 = size_binop_loc (loc, PLUS_EXPR, len, ssize_int (1));
6285 ret = expand_builtin_mempcpy_args (dst, src, lenp1,
6286 target, exp,
6287 /*retmode=*/ RETURN_END_MINUS_ONE);
6289 if (ret)
6290 return ret;
6292 if (TREE_CODE (len) == INTEGER_CST)
6294 rtx len_rtx = expand_normal (len);
6296 if (CONST_INT_P (len_rtx))
6298 ret = expand_builtin_strcpy_args (exp, dst, src, target);
6300 if (ret)
6302 if (! target)
6304 if (mode != VOIDmode)
6305 target = gen_reg_rtx (mode);
6306 else
6307 target = gen_reg_rtx (GET_MODE (ret));
6309 if (GET_MODE (target) != GET_MODE (ret))
6310 ret = gen_lowpart (GET_MODE (target), ret);
6312 ret = plus_constant (GET_MODE (ret), ret, INTVAL (len_rtx));
6313 ret = emit_move_insn (target, force_operand (ret, NULL_RTX));
6314 gcc_assert (ret);
6316 return target;
6321 return expand_movstr (dst, src, target,
6322 /*retmode=*/ RETURN_END_MINUS_ONE);
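/* An illustrative sketch (editorial) of the transformations above:

     stpcpy (d, s);        // result unused: expanded as strcpy (d, s)
     p = stpcpy (d, "hi"); // constant length 2: expanded like
                           //   p = (char *) mempcpy (d, "hi", 3) - 1

   The copy includes the NUL (LEN + 1 bytes) and the result points at
   the copied NUL, matching RETURN_END_MINUS_ONE.  */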
6326 /* Expand a call EXP to the stpcpy builtin and diagnose uses of nonstring
6327 arguments while being careful to avoid duplicate warnings (which could
6328 be issued if the expander were to expand the call, resulting in it
6329 being emitted in expand_call()). */
6331 static rtx
6332 expand_builtin_stpcpy (tree exp, rtx target, machine_mode mode)
6334 if (rtx ret = expand_builtin_stpcpy_1 (exp, target, mode))
6336 /* The call has been successfully expanded. Check for nonstring
6337 arguments and issue warnings as appropriate. */
6338 maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp);
6339 return ret;
6342 return NULL_RTX;
6345 /* Check a call EXP to the stpncpy built-in for validity.
6346 Return NULL_RTX on both success and failure. */
6348 static rtx
6349 expand_builtin_stpncpy (tree exp, rtx)
6351 if (!validate_arglist (exp,
6352 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6353 || !warn_stringop_overflow)
6354 return NULL_RTX;
6356 /* The source and destination of the call. */
6357 tree dest = CALL_EXPR_ARG (exp, 0);
6358 tree src = CALL_EXPR_ARG (exp, 1);
6360 /* The exact number of bytes to write (not the maximum). */
6361 tree len = CALL_EXPR_ARG (exp, 2);
6362 access_data data (exp, access_read_write);
6363 /* The size of the destination object. */
6364 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6365 check_access (exp, len, /*maxread=*/len, src, destsize, data.mode, &data);
6366 return NULL_RTX;
6369 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
6370 bytes from constant string DATA + OFFSET and return it as target
6371 constant. */
6373 static rtx
6374 builtin_strncpy_read_str (void *data, void *, HOST_WIDE_INT offset,
6375 scalar_int_mode mode)
6377 const char *str = (const char *) data;
6379 if ((unsigned HOST_WIDE_INT) offset > strlen (str))
6380 return const0_rtx;
6382 return c_readstr (str + offset, mode);
6385 /* Helper to check the sizes of sequences and the destination of calls
6386 to __builtin_strncat and __builtin___strncat_chk. Returns true on
6387 success (no overflow or invalid sizes), false otherwise. */
6389 static bool
6390 check_strncat_sizes (tree exp, tree objsize)
6392 tree dest = CALL_EXPR_ARG (exp, 0);
6393 tree src = CALL_EXPR_ARG (exp, 1);
6394 tree maxread = CALL_EXPR_ARG (exp, 2);
6396 /* Try to determine the range of lengths that the source expression
6397 refers to. */
6398 c_strlen_data lendata = { };
6399 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6401 /* Try to verify that the destination is big enough for the shortest
6402 string. */
6404 access_data data (exp, access_read_write, maxread, true);
6405 if (!objsize && warn_stringop_overflow)
6407 /* If it hasn't been provided by __strncat_chk, try to determine
6408 the size of the destination object into which the source is
6409 being copied. */
6410 objsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6413 /* Add one for the terminating nul. */
6414 tree srclen = (lendata.minlen
6415 ? fold_build2 (PLUS_EXPR, size_type_node, lendata.minlen,
6416 size_one_node)
6417 : NULL_TREE);
6419 /* The strncat function copies at most MAXREAD bytes and always appends
6420 the terminating nul so the specified upper bound should never be equal
6421 to (or greater than) the size of the destination. */
6422 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (objsize)
6423 && tree_int_cst_equal (objsize, maxread))
6425 location_t loc = tree_inlined_location (exp);
6426 warning_at (loc, OPT_Wstringop_overflow_,
6427 "%K%qD specified bound %E equals destination size",
6428 exp, get_callee_fndecl (exp), maxread);
6430 return false;
6433 if (!srclen
6434 || (maxread && tree_fits_uhwi_p (maxread)
6435 && tree_fits_uhwi_p (srclen)
6436 && tree_int_cst_lt (maxread, srclen)))
6437 srclen = maxread;
6439 /* The number of bytes to write is LEN but check_access will also
6440 check SRCLEN if LEN's value isn't known. */
6441 return check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6442 objsize, data.mode, &data);
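/* An illustrative sketch (editorial, hypothetical example) of the bound
   check above:

     char d[8];
     strncat (d, s, sizeof d);   // bound equals destination size

   Since strncat appends the terminating NUL in addition to at most
   MAXREAD bytes, a bound equal to the destination size leaves no room
   for the NUL in the worst case, so it is diagnosed with
   -Wstringop-overflow.  */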
6445 /* Similar to expand_builtin_strcat, do some very basic size validation
6446 of a call to the strncat builtin given by EXP. Return NULL_RTX to have
6447 the built-in expand to a call to the library function. */
6449 static rtx
6450 expand_builtin_strncat (tree exp, rtx)
6452 if (!validate_arglist (exp,
6453 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
6454 || !warn_stringop_overflow)
6455 return NULL_RTX;
6457 tree dest = CALL_EXPR_ARG (exp, 0);
6458 tree src = CALL_EXPR_ARG (exp, 1);
6459 /* The upper bound on the number of bytes to write. */
6460 tree maxread = CALL_EXPR_ARG (exp, 2);
6462 /* Detect unterminated source (only). */
6463 if (!check_nul_terminated_array (exp, src, maxread))
6464 return NULL_RTX;
6466 /* The length of the source sequence. */
6467 tree slen = c_strlen (src, 1);
6469 /* Try to determine the range of lengths that the source expression
6470 refers to. Since the lengths are only used for warning and not
6471 for code generation, disable strict mode below. */
6472 tree maxlen = slen;
6473 if (!maxlen)
6475 c_strlen_data lendata = { };
6476 get_range_strlen (src, &lendata, /* eltsize = */ 1);
6477 maxlen = lendata.maxbound;
6480 access_data data (exp, access_read_write);
6481 /* Try to verify that the destination is big enough for the shortest
6482 string. First try to determine the size of the destination object
6483 into which the source is being copied. */
6484 tree destsize = compute_objsize (dest, warn_stringop_overflow - 1, &data.dst);
6486 /* Add one for the terminating nul. */
6487 tree srclen = (maxlen
6488 ? fold_build2 (PLUS_EXPR, size_type_node, maxlen,
6489 size_one_node)
6490 : NULL_TREE);
6492 /* The strncat function copies at most MAXREAD bytes and always appends
6493 the terminating nul so the specified upper bound should never be equal
6494 to (or greater than) the size of the destination. */
6495 if (tree_fits_uhwi_p (maxread) && tree_fits_uhwi_p (destsize)
6496 && tree_int_cst_equal (destsize, maxread))
6498 location_t loc = tree_inlined_location (exp);
6499 warning_at (loc, OPT_Wstringop_overflow_,
6500 "%K%qD specified bound %E equals destination size",
6501 exp, get_callee_fndecl (exp), maxread);
6503 return NULL_RTX;
6506 if (!srclen
6507 || (maxread && tree_fits_uhwi_p (maxread)
6508 && tree_fits_uhwi_p (srclen)
6509 && tree_int_cst_lt (maxread, srclen)))
6510 srclen = maxread;
6512 check_access (exp, /*dstwrite=*/NULL_TREE, maxread, srclen,
6513 destsize, data.mode, &data);
6514 return NULL_RTX;
6517 /* Expand expression EXP, which is a call to the strncpy builtin. Return
6518 NULL_RTX if we failed; the caller should emit a normal call. */
6520 static rtx
6521 expand_builtin_strncpy (tree exp, rtx target)
6523 location_t loc = EXPR_LOCATION (exp);
6525 if (!validate_arglist (exp,
6526 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6527 return NULL_RTX;
6528 tree dest = CALL_EXPR_ARG (exp, 0);
6529 tree src = CALL_EXPR_ARG (exp, 1);
6530 /* The number of bytes to write (not the maximum). */
6531 tree len = CALL_EXPR_ARG (exp, 2);
6533 /* The length of the source sequence. */
6534 tree slen = c_strlen (src, 1);
6536 if (warn_stringop_overflow)
6538 access_data data (exp, access_read_write, len, true, len, true);
6539 const int ost = warn_stringop_overflow ? warn_stringop_overflow - 1 : 1;
6540 compute_objsize (src, ost, &data.src);
6541 tree dstsize = compute_objsize (dest, ost, &data.dst);
6542 /* The number of bytes to write is LEN but check_access will also
6543 check SLEN if LEN's value isn't known. */
6544 check_access (exp, /*dstwrite=*/len,
6545 /*maxread=*/len, src, dstsize, data.mode, &data);
6548 /* We must be passed a constant len and src parameter. */
6549 if (!tree_fits_uhwi_p (len) || !slen || !tree_fits_uhwi_p (slen))
6550 return NULL_RTX;
6552 slen = size_binop_loc (loc, PLUS_EXPR, slen, ssize_int (1));
6554 /* We're required to pad with trailing zeros if the requested
6555 len is greater than strlen(s2)+1. In that case try to
6556 use store_by_pieces; if it fails, punt. */
6557 if (tree_int_cst_lt (slen, len))
6559 unsigned int dest_align = get_pointer_alignment (dest);
6560 const char *p = c_getstr (src);
6561 rtx dest_mem;
6563 if (!p || dest_align == 0 || !tree_fits_uhwi_p (len)
6564 || !can_store_by_pieces (tree_to_uhwi (len),
6565 builtin_strncpy_read_str,
6566 CONST_CAST (char *, p),
6567 dest_align, false))
6568 return NULL_RTX;
6570 dest_mem = get_memory_rtx (dest, len);
6571 store_by_pieces (dest_mem, tree_to_uhwi (len),
6572 builtin_strncpy_read_str,
6573 CONST_CAST (char *, p), dest_align, false,
6574 RETURN_BEGIN);
6575 dest_mem = force_operand (XEXP (dest_mem, 0), target);
6576 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6577 return dest_mem;
6580 return NULL_RTX;
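/* An illustrative sketch (editorial, hypothetical example) of the
   padding case above:

     char d[8];
     strncpy (d, "ab", 8);   // strlen ("ab") + 1 == 3 < 8

   strncpy must zero-fill the remaining bytes; builtin_strncpy_read_str
   returns the string's bytes for offsets within it and const0_rtx past
   its end, so the whole 8-byte store can be emitted by pieces.  */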
6583 /* Callback routine for store_by_pieces. Read GET_MODE_SIZE (MODE)
6584 bytes from constant string DATA + OFFSET and return it as target
6585 constant. If PREV isn't nullptr, it has the RTL info from the
6586 previous iteration. */
6588 static rtx
6589 builtin_memset_read_str (void *data, void *prevp,
6590 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6591 scalar_int_mode mode)
6593 by_pieces_prev *prev = (by_pieces_prev *) prevp;
6594 if (prev != nullptr && prev->data != nullptr)
6596 /* Use the previous data in the same mode. */
6597 if (prev->mode == mode)
6598 return prev->data;
6601 const char *c = (const char *) data;
6602 char *p = XALLOCAVEC (char, GET_MODE_SIZE (mode));
6604 memset (p, *c, GET_MODE_SIZE (mode));
6606 return c_readstr (p, mode);
6609 /* Callback routine for store_by_pieces. Return the RTL of a register
6610 containing GET_MODE_SIZE (MODE) consecutive copies of the unsigned
6611 char value given in the RTL register data. For example, if mode is
6612 4 bytes wide, return the RTL for 0x01010101*data. If PREV isn't
6613 nullptr, it has the RTL info from the previous iteration. */
6615 static rtx
6616 builtin_memset_gen_str (void *data, void *prevp,
6617 HOST_WIDE_INT offset ATTRIBUTE_UNUSED,
6618 scalar_int_mode mode)
6620 rtx target, coeff;
6621 size_t size;
6622 char *p;
6624 by_pieces_prev *prev = (by_pieces_prev *) prevp;
6625 if (prev != nullptr && prev->data != nullptr)
6627 /* Use the previous data in the same mode. */
6628 if (prev->mode == mode)
6629 return prev->data;
6631 target = simplify_gen_subreg (mode, prev->data, prev->mode, 0);
6632 if (target != nullptr)
6633 return target;
6636 size = GET_MODE_SIZE (mode);
6637 if (size == 1)
6638 return (rtx) data;
6640 p = XALLOCAVEC (char, size);
6641 memset (p, 1, size);
6642 coeff = c_readstr (p, mode);
6644 target = convert_to_mode (mode, (rtx) data, 1);
6645 target = expand_mult (mode, target, coeff, NULL_RTX, 1);
6646 return force_reg (mode, target);
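/* An illustrative sketch (editorial, plain C) of the replication trick
   above for a 4-byte mode:

     unsigned int
     splat4 (unsigned char c)   // hypothetical helper
     {
       return c * 0x01010101u;  // copies C into all four bytes
     }

   c_readstr on a buffer of 1s yields the 0x01...01 coefficient for the
   mode, and expand_mult emits the corresponding multiplication.  */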
6649 /* Expand expression EXP, which is a call to the memset builtin. Return
6650 NULL_RTX if we failed; the caller should emit a normal call, otherwise
6651 try to get the result in TARGET, if convenient (and in mode MODE if that's
6652 convenient). */
6654 static rtx
6655 expand_builtin_memset (tree exp, rtx target, machine_mode mode)
6657 if (!validate_arglist (exp,
6658 POINTER_TYPE, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
6659 return NULL_RTX;
6661 tree dest = CALL_EXPR_ARG (exp, 0);
6662 tree val = CALL_EXPR_ARG (exp, 1);
6663 tree len = CALL_EXPR_ARG (exp, 2);
6665 check_memop_access (exp, dest, NULL_TREE, len);
6667 return expand_builtin_memset_args (dest, val, len, target, mode, exp);
6670 /* Try to store VAL (or, if NULL_RTX, VALC) in LEN bytes starting at TO.
6671 Return TRUE if successful, FALSE otherwise. TO is assumed to be
6672 aligned at an ALIGN-bits boundary. LEN must be a multiple of
6673 1<<CTZ_LEN between MIN_LEN and MAX_LEN.
6675 The strategy is to issue one store_by_pieces for each power of two,
6676 from most to least significant, guarded by a test on whether there
6677 are at least that many bytes left to copy in LEN.
6679 ??? Should we skip some powers of two in favor of loops? Maybe start
6680 at the max of TO/LEN/word alignment, at least when optimizing for
6681 size, instead of ensuring O(log len) dynamic compares? */
6683 bool
6684 try_store_by_multiple_pieces (rtx to, rtx len, unsigned int ctz_len,
6685 unsigned HOST_WIDE_INT min_len,
6686 unsigned HOST_WIDE_INT max_len,
6687 rtx val, char valc, unsigned int align)
6689 int max_bits = floor_log2 (max_len);
6690 int min_bits = floor_log2 (min_len);
6691 int sctz_len = ctz_len;
6693 gcc_checking_assert (sctz_len >= 0);
6695 if (val)
6696 valc = 1;
6698 /* Bits more significant than TST_BITS are part of the shared prefix
6699 in the binary representation of both min_len and max_len. Since
6700 they're identical, we don't need to test them in the loop. */
6701 int tst_bits = (max_bits != min_bits ? max_bits
6702 : floor_log2 (max_len ^ min_len));
6704 /* Check whether it's profitable to start by storing a fixed BLKSIZE
6705 bytes, to lower max_bits. In the unlikely case of a constant LEN
6706 (implied by identical MAX_LEN and MIN_LEN), we want to issue a
6707 single store_by_pieces, but otherwise, select the minimum multiple
6708 of the ALIGN (in bytes) and of the GCD of the possible LENs that
6709 brings MAX_LEN below TST_BITS, if that's lower than min_len. */
6710 unsigned HOST_WIDE_INT blksize;
6711 if (max_len > min_len)
6713 unsigned HOST_WIDE_INT alrng = MAX (HOST_WIDE_INT_1U << ctz_len,
6714 align / BITS_PER_UNIT);
6715 blksize = max_len - (HOST_WIDE_INT_1U << tst_bits) + alrng;
6716 blksize &= ~(alrng - 1);
6718 else if (max_len == min_len)
6719 blksize = max_len;
6720 else
6721 gcc_unreachable ();
6722 if (min_len >= blksize)
6724 min_len -= blksize;
6725 min_bits = floor_log2 (min_len);
6726 max_len -= blksize;
6727 max_bits = floor_log2 (max_len);
6729 tst_bits = (max_bits != min_bits ? max_bits
6730 : floor_log2 (max_len ^ min_len));
6732 else
6733 blksize = 0;
6735 /* Check that we can use store by pieces for the maximum store count
6736 we may issue (initial fixed-size block, plus conditional
6737 power-of-two-sized stores from max_bits to ctz_len).  */
6738 unsigned HOST_WIDE_INT xlenest = blksize;
6739 if (max_bits >= 0)
6740 xlenest += ((HOST_WIDE_INT_1U << max_bits) * 2
6741 - (HOST_WIDE_INT_1U << ctz_len));
6742 if (!can_store_by_pieces (xlenest, builtin_memset_read_str,
6743 &valc, align, true))
6744 return false;
6746 rtx (*constfun) (void *, void *, HOST_WIDE_INT, scalar_int_mode);
6747 void *constfundata;
6748 if (val)
6750 constfun = builtin_memset_gen_str;
6751 constfundata = val = force_reg (TYPE_MODE (unsigned_char_type_node),
6752 val);
6754 else
6756 constfun = builtin_memset_read_str;
6757 constfundata = &valc;
6760 rtx ptr = copy_addr_to_reg (convert_to_mode (ptr_mode, XEXP (to, 0), 0));
6761 rtx rem = copy_to_mode_reg (ptr_mode, convert_to_mode (ptr_mode, len, 0));
6762 to = replace_equiv_address (to, ptr);
6763 set_mem_align (to, align);
6765 if (blksize)
6767 to = store_by_pieces (to, blksize,
6768 constfun, constfundata,
6769 align, true,
6770 max_len != 0 ? RETURN_END : RETURN_BEGIN);
6771 if (max_len == 0)
6772 return true;
6774 /* Adjust PTR, TO and REM. Since TO's address is likely
6775 PTR+offset, we have to replace it. */
6776 emit_move_insn (ptr, XEXP (to, 0));
6777 to = replace_equiv_address (to, ptr);
6778 emit_move_insn (rem, plus_constant (ptr_mode, rem, -blksize));
6781 /* Iterate over power-of-two block sizes from the maximum length to
6782 the least significant bit possibly set in the length. */
6783 for (int i = max_bits; i >= sctz_len; i--)
6785 rtx_code_label *label = NULL;
6786 blksize = HOST_WIDE_INT_1U << i;
6788 /* If we're past the bits shared between min_ and max_len, expand
6789 a test on the dynamic length, comparing it with the
6790 BLKSIZE. */
6791 if (i <= tst_bits)
6793 label = gen_label_rtx ();
6794 emit_cmp_and_jump_insns (rem, GEN_INT (blksize), LT, NULL,
6795 ptr_mode, 1, label,
6796 profile_probability::even ());
6798 /* If we are at a bit that is in the prefix shared by min_ and
6799 max_len, skip this BLKSIZE if the bit is clear. */
6800 else if ((max_len & blksize) == 0)
6801 continue;
6803 /* Issue a store of BLKSIZE bytes. */
6804 to = store_by_pieces (to, blksize,
6805 constfun, constfundata,
6806 align, true,
6807 i != sctz_len ? RETURN_END : RETURN_BEGIN);
6809 /* Adjust REM and PTR, unless this is the last iteration. */
6810 if (i != sctz_len)
6812 emit_move_insn (ptr, XEXP (to, 0));
6813 to = replace_equiv_address (to, ptr);
6814 emit_move_insn (rem, plus_constant (ptr_mode, rem, -blksize));
6817 if (label)
6819 emit_label (label);
6821 /* Given conditional stores, the offset can no longer be
6822 known, so clear it. */
6823 clear_mem_offset (to);
6827 return true;
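/* An illustrative sketch (editorial, C pseudocode) of the sequence the
   function above emits for a variable LEN known to be in [0, 16) with
   CTZ_LEN == 0, each fixed-size memset standing for one
   store_by_pieces block:

     if (len >= 8) { memset (p, c, 8); p += 8; len -= 8; }
     if (len >= 4) { memset (p, c, 4); p += 4; len -= 4; }
     if (len >= 2) { memset (p, c, 2); p += 2; len -= 2; }
     if (len >= 1) memset (p, c, 1);

   The number of dynamic tests is thus O(log LEN) rather than one per
   byte.  */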
6830 /* Helper function to do the actual work for expand_builtin_memset. The
6831 arguments to the builtin_memset call DEST, VAL, and LEN are broken out
6832 so that this can also be called without constructing an actual CALL_EXPR.
6833 The other arguments and return value are the same as for
6834 expand_builtin_memset. */
6836 static rtx
6837 expand_builtin_memset_args (tree dest, tree val, tree len,
6838 rtx target, machine_mode mode, tree orig_exp)
6840 tree fndecl, fn;
6841 enum built_in_function fcode;
6842 machine_mode val_mode;
6843 char c;
6844 unsigned int dest_align;
6845 rtx dest_mem, dest_addr, len_rtx;
6846 HOST_WIDE_INT expected_size = -1;
6847 unsigned int expected_align = 0;
6848 unsigned HOST_WIDE_INT min_size;
6849 unsigned HOST_WIDE_INT max_size;
6850 unsigned HOST_WIDE_INT probable_max_size;
6852 dest_align = get_pointer_alignment (dest);
6854 /* If DEST is not a pointer type, don't do this operation in-line. */
6855 if (dest_align == 0)
6856 return NULL_RTX;
6858 if (currently_expanding_gimple_stmt)
6859 stringop_block_profile (currently_expanding_gimple_stmt,
6860 &expected_align, &expected_size);
6862 if (expected_align < dest_align)
6863 expected_align = dest_align;
6865 /* If the LEN parameter is zero, return DEST. */
6866 if (integer_zerop (len))
6868 /* Evaluate and ignore VAL in case it has side-effects. */
6869 expand_expr (val, const0_rtx, VOIDmode, EXPAND_NORMAL);
6870 return expand_expr (dest, target, mode, EXPAND_NORMAL);
6873 /* Stabilize the arguments in case we fail. */
6874 dest = builtin_save_expr (dest);
6875 val = builtin_save_expr (val);
6876 len = builtin_save_expr (len);
6878 len_rtx = expand_normal (len);
6879 determine_block_size (len, len_rtx, &min_size, &max_size,
6880 &probable_max_size);
6881 dest_mem = get_memory_rtx (dest, len);
6882 val_mode = TYPE_MODE (unsigned_char_type_node);
6884 if (TREE_CODE (val) != INTEGER_CST
6885 || target_char_cast (val, &c))
6887 rtx val_rtx;
6889 val_rtx = expand_normal (val);
6890 val_rtx = convert_to_mode (val_mode, val_rtx, 0);
6892 /* Assume that we can memset by pieces if we can store
6893 the coefficients by pieces (in the required modes).
6894 We can't pass builtin_memset_gen_str as that emits RTL. */
6895 c = 1;
6896 if (tree_fits_uhwi_p (len)
6897 && can_store_by_pieces (tree_to_uhwi (len),
6898 builtin_memset_read_str, &c, dest_align,
6899 true))
6901 val_rtx = force_reg (val_mode, val_rtx);
6902 store_by_pieces (dest_mem, tree_to_uhwi (len),
6903 builtin_memset_gen_str, val_rtx, dest_align,
6904 true, RETURN_BEGIN);
6906 else if (!set_storage_via_setmem (dest_mem, len_rtx, val_rtx,
6907 dest_align, expected_align,
6908 expected_size, min_size, max_size,
6909 probable_max_size)
6910 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
6911 tree_ctz (len),
6912 min_size, max_size,
6913 val_rtx, 0,
6914 dest_align))
6915 goto do_libcall;
6917 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6918 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6919 return dest_mem;
6922 if (c)
6924 if (tree_fits_uhwi_p (len)
6925 && can_store_by_pieces (tree_to_uhwi (len),
6926 builtin_memset_read_str, &c, dest_align,
6927 true))
6928 store_by_pieces (dest_mem, tree_to_uhwi (len),
6929 builtin_memset_read_str, &c, dest_align, true,
6930 RETURN_BEGIN);
6931 else if (!set_storage_via_setmem (dest_mem, len_rtx,
6932 gen_int_mode (c, val_mode),
6933 dest_align, expected_align,
6934 expected_size, min_size, max_size,
6935 probable_max_size)
6936 && !try_store_by_multiple_pieces (dest_mem, len_rtx,
6937 tree_ctz (len),
6938 min_size, max_size,
6939 NULL_RTX, c,
6940 dest_align))
6941 goto do_libcall;
6943 dest_mem = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6944 dest_mem = convert_memory_address (ptr_mode, dest_mem);
6945 return dest_mem;
6948 set_mem_align (dest_mem, dest_align);
6949 dest_addr = clear_storage_hints (dest_mem, len_rtx,
6950 CALL_EXPR_TAILCALL (orig_exp)
6951 ? BLOCK_OP_TAILCALL : BLOCK_OP_NORMAL,
6952 expected_align, expected_size,
6953 min_size, max_size,
6954 probable_max_size, tree_ctz (len));
6956 if (dest_addr == 0)
6958 dest_addr = force_operand (XEXP (dest_mem, 0), NULL_RTX);
6959 dest_addr = convert_memory_address (ptr_mode, dest_addr);
6962 return dest_addr;
6964 do_libcall:
6965 fndecl = get_callee_fndecl (orig_exp);
6966 fcode = DECL_FUNCTION_CODE (fndecl);
6967 if (fcode == BUILT_IN_MEMSET)
6968 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 3,
6969 dest, val, len);
6970 else if (fcode == BUILT_IN_BZERO)
6971 fn = build_call_nofold_loc (EXPR_LOCATION (orig_exp), fndecl, 2,
6972 dest, len);
6973 else
6974 gcc_unreachable ();
6975 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
6976 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (orig_exp);
6977 return expand_call (fn, target, target == const0_rtx);
6980 /* Expand expression EXP, which is a call to the bzero builtin. Return
6981 NULL_RTX if we failed; the caller should emit a normal call. */
6983 static rtx
6984 expand_builtin_bzero (tree exp)
6986 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
6987 return NULL_RTX;
6989 tree dest = CALL_EXPR_ARG (exp, 0);
6990 tree size = CALL_EXPR_ARG (exp, 1);
6992 check_memop_access (exp, dest, NULL_TREE, size);
6994 /* New argument list transforming bzero(ptr x, int y) to
6995 memset(ptr x, int 0, size_t y). This is done this way
6996 so that if it isn't expanded inline, we fall back to
6997 calling bzero instead of memset. */
6999 location_t loc = EXPR_LOCATION (exp);
7001 return expand_builtin_memset_args (dest, integer_zero_node,
7002 fold_convert_loc (loc,
7003 size_type_node, size),
7004 const0_rtx, VOIDmode, exp);
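/* An illustrative note (editorial): the rewrite above means

     bzero (p, n)   is expanded as   memset (p, 0, (size_t) n)

   but through expand_builtin_memset_args directly, so that when inline
   expansion fails the emitted library call falls back to bzero itself
   rather than to memset.  */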
7007 /* Try to expand cmpstr operation ICODE with the given operands.
7008 Return the result rtx on success, otherwise return null. */
7010 static rtx
7011 expand_cmpstr (insn_code icode, rtx target, rtx arg1_rtx, rtx arg2_rtx,
7012 HOST_WIDE_INT align)
7014 machine_mode insn_mode = insn_data[icode].operand[0].mode;
7016 if (target && (!REG_P (target) || HARD_REGISTER_P (target)))
7017 target = NULL_RTX;
7019 class expand_operand ops[4];
7020 create_output_operand (&ops[0], target, insn_mode);
7021 create_fixed_operand (&ops[1], arg1_rtx);
7022 create_fixed_operand (&ops[2], arg2_rtx);
7023 create_integer_operand (&ops[3], align);
7024 if (maybe_expand_insn (icode, 4, ops))
7025 return ops[0].value;
7026 return NULL_RTX;
7029 /* Expand expression EXP, which is a call to the memcmp built-in function.
7030 Return NULL_RTX if we failed and the caller should emit a normal call,
7031 otherwise try to get the result in TARGET, if convenient.
7032 RESULT_EQ is true if we can relax the returned value to be either zero
7033 or nonzero, without caring about the sign. */
7035 static rtx
7036 expand_builtin_memcmp (tree exp, rtx target, bool result_eq)
7038 if (!validate_arglist (exp,
7039 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7040 return NULL_RTX;
7042 tree arg1 = CALL_EXPR_ARG (exp, 0);
7043 tree arg2 = CALL_EXPR_ARG (exp, 1);
7044 tree len = CALL_EXPR_ARG (exp, 2);
7046 /* Diagnose calls where the specified length exceeds the size of either
7047 object. */
7048 if (!check_read_access (exp, arg1, len, 0)
7049 || !check_read_access (exp, arg2, len, 0))
7050 return NULL_RTX;
7052 /* Due to the performance benefit, always inline the calls first
7053 when result_eq is false. */
7054 rtx result = NULL_RTX;
7055 enum built_in_function fcode = DECL_FUNCTION_CODE (get_callee_fndecl (exp));
7056 if (!result_eq && fcode != BUILT_IN_BCMP)
7058 result = inline_expand_builtin_bytecmp (exp, target);
7059 if (result)
7060 return result;
7063 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7064 location_t loc = EXPR_LOCATION (exp);
7066 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
7067 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7069 /* If we don't have POINTER_TYPE, call the function. */
7070 if (arg1_align == 0 || arg2_align == 0)
7071 return NULL_RTX;
7073 rtx arg1_rtx = get_memory_rtx (arg1, len);
7074 rtx arg2_rtx = get_memory_rtx (arg2, len);
7075 rtx len_rtx = expand_normal (fold_convert_loc (loc, sizetype, len));
7077 /* Set MEM_SIZE as appropriate. */
7078 if (CONST_INT_P (len_rtx))
7080 set_mem_size (arg1_rtx, INTVAL (len_rtx));
7081 set_mem_size (arg2_rtx, INTVAL (len_rtx));
7084 by_pieces_constfn constfn = NULL;
7086 /* Try to get the byte representation of the constant ARG2 (or, only
7087 when the function's result is used for equality to zero, ARG1)
7088 points to, with its byte size in NBYTES. */
7089 unsigned HOST_WIDE_INT nbytes;
7090 const char *rep = getbyterep (arg2, &nbytes);
7091 if (result_eq && rep == NULL)
7093 /* For equality to zero the arguments are interchangeable. */
7094 rep = getbyterep (arg1, &nbytes);
7095 if (rep != NULL)
7096 std::swap (arg1_rtx, arg2_rtx);
7099 /* If the function's constant bound LEN_RTX is less than or equal
7100 to the byte size of the representation of the constant argument,
7101 and if block move would be done by pieces, we can avoid loading
7102 the bytes from memory and only store the computed constant result. */
7103 if (rep
7104 && CONST_INT_P (len_rtx)
7105 && (unsigned HOST_WIDE_INT) INTVAL (len_rtx) <= nbytes)
7106 constfn = builtin_memcpy_read_str;
7108 result = emit_block_cmp_hints (arg1_rtx, arg2_rtx, len_rtx,
7109 TREE_TYPE (len), target,
7110 result_eq, constfn,
7111 CONST_CAST (char *, rep));
7113 if (result)
7115 /* Return the value in the proper mode for this function. */
7116 if (GET_MODE (result) == mode)
7117 return result;
7119 if (target != 0)
7121 convert_move (target, result, 0);
7122 return target;
7125 return convert_to_mode (mode, result, 0);
7128 return NULL_RTX;
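/* An illustrative sketch (editorial, hypothetical example) of the
   constant handling above:

     memcmp (p, "ab", 2) == 0

   getbyterep obtains the bytes of "ab"; since the constant length 2 is
   <= NBYTES, emit_block_cmp_hints can compare P's bytes against
   immediates via builtin_memcpy_read_str instead of loading the string
   constant.  For equality tests the arguments are interchangeable, so
   a constant first argument is handled by swapping ARG1_RTX and
   ARG2_RTX.  */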
7131 /* Expand expression EXP, which is a call to the strcmp builtin. Return NULL_RTX
7132 if we failed; the caller should emit a normal call, otherwise try to get
7133 the result in TARGET, if convenient. */
7135 static rtx
7136 expand_builtin_strcmp (tree exp, ATTRIBUTE_UNUSED rtx target)
7138 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
7139 return NULL_RTX;
7141 tree arg1 = CALL_EXPR_ARG (exp, 0);
7142 tree arg2 = CALL_EXPR_ARG (exp, 1);
7144 if (!check_read_access (exp, arg1)
7145 || !check_read_access (exp, arg2))
7146 return NULL_RTX;
7148 /* Due to the performance benefit, always inline the calls first. */
7149 rtx result = NULL_RTX;
7150 result = inline_expand_builtin_bytecmp (exp, target);
7151 if (result)
7152 return result;
7154 insn_code cmpstr_icode = direct_optab_handler (cmpstr_optab, SImode);
7155 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
7156 if (cmpstr_icode == CODE_FOR_nothing && cmpstrn_icode == CODE_FOR_nothing)
7157 return NULL_RTX;
7159 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
7160 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7162 /* If we don't know the alignment of either argument, call the function. */
7163 if (arg1_align == 0 || arg2_align == 0)
7164 return NULL_RTX;
7166 /* Stabilize the arguments in case gen_cmpstr(n)si fail. */
7167 arg1 = builtin_save_expr (arg1);
7168 arg2 = builtin_save_expr (arg2);
7170 rtx arg1_rtx = get_memory_rtx (arg1, NULL);
7171 rtx arg2_rtx = get_memory_rtx (arg2, NULL);
7173 /* Try to call cmpstrsi. */
7174 if (cmpstr_icode != CODE_FOR_nothing)
7175 result = expand_cmpstr (cmpstr_icode, target, arg1_rtx, arg2_rtx,
7176 MIN (arg1_align, arg2_align));
7178 /* Try to determine at least one length and call cmpstrnsi. */
7179 if (!result && cmpstrn_icode != CODE_FOR_nothing)
7181 tree len;
7182 rtx arg3_rtx;
7184 tree len1 = c_strlen (arg1, 1);
7185 tree len2 = c_strlen (arg2, 1);
7187 if (len1)
7188 len1 = size_binop (PLUS_EXPR, ssize_int (1), len1);
7189 if (len2)
7190 len2 = size_binop (PLUS_EXPR, ssize_int (1), len2);
7192 /* If we don't have a constant length for the first, use the length
7193 of the second, if we know it. We don't require a constant for
7194 this case; some cost analysis could be done if both are available
7195 but neither is constant. For now, assume they're equally cheap,
7196 unless one has side effects. If both strings have constant lengths,
7197 use the smaller. */
7199 if (!len1)
7200 len = len2;
7201 else if (!len2)
7202 len = len1;
7203 else if (TREE_SIDE_EFFECTS (len1))
7204 len = len2;
7205 else if (TREE_SIDE_EFFECTS (len2))
7206 len = len1;
7207 else if (TREE_CODE (len1) != INTEGER_CST)
7208 len = len2;
7209 else if (TREE_CODE (len2) != INTEGER_CST)
7210 len = len1;
7211 else if (tree_int_cst_lt (len1, len2))
7212 len = len1;
7213 else
7214 len = len2;
7216 /* If both arguments have side effects, we cannot optimize. */
7217 if (len && !TREE_SIDE_EFFECTS (len))
7219 arg3_rtx = expand_normal (len);
7220 result = expand_cmpstrn_or_cmpmem
7221 (cmpstrn_icode, target, arg1_rtx, arg2_rtx, TREE_TYPE (len),
7222 arg3_rtx, MIN (arg1_align, arg2_align));
7226 tree fndecl = get_callee_fndecl (exp);
7227 if (result)
7229 /* Check to see if the argument was declared attribute nonstring
7230 and if so, issue a warning since at this point it's not known
7231 to be nul-terminated. */
7232 maybe_warn_nonstring_arg (fndecl, exp);
7234 /* Return the value in the proper mode for this function. */
7235 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
7236 if (GET_MODE (result) == mode)
7237 return result;
7238 if (target == 0)
7239 return convert_to_mode (mode, result, 0);
7240 convert_move (target, result, 0);
7241 return target;
7244 /* Expand the library call ourselves using a stabilized argument
7245 list to avoid re-evaluating the function's arguments twice. */
7246 tree fn = build_call_nofold_loc (EXPR_LOCATION (exp), fndecl, 2, arg1, arg2);
7247 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
7248 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
7249 return expand_call (fn, target, target == const0_rtx);
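/* Illustration, not part of GCC: with one constant operand, e.g.

     int cmp (const char *s)
     {
       return __builtin_strcmp (s, "hi");
     }

   c_strlen yields 2 for the literal, LEN2 becomes 2 + 1 == 3, and the
   cmpstrn path above compares at most 3 bytes; the literal's
   terminating nul guarantees the comparison cannot need more. */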
7252 /* Expand expression EXP, which is a call to the strncmp builtin. Return
7253 NULL_RTX if we failed, in which case the caller should emit a normal call;
7254 otherwise try to get the result in TARGET, if convenient. */
7256 static rtx
7257 expand_builtin_strncmp (tree exp, ATTRIBUTE_UNUSED rtx target,
7258 ATTRIBUTE_UNUSED machine_mode mode)
7260 if (!validate_arglist (exp,
7261 POINTER_TYPE, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
7262 return NULL_RTX;
7264 tree arg1 = CALL_EXPR_ARG (exp, 0);
7265 tree arg2 = CALL_EXPR_ARG (exp, 1);
7266 tree arg3 = CALL_EXPR_ARG (exp, 2);
7268 if (!check_nul_terminated_array (exp, arg1, arg3)
7269 || !check_nul_terminated_array (exp, arg2, arg3))
7270 return NULL_RTX;
7272 location_t loc = tree_inlined_location (exp);
7273 tree len1 = c_strlen (arg1, 1);
7274 tree len2 = c_strlen (arg2, 1);
7276 if (!len1 || !len2)
7278 /* Check to see if the argument was declared attribute nonstring
7279 and if so, issue a warning since at this point it's not known
7280 to be nul-terminated. */
7281 if (!maybe_warn_nonstring_arg (get_callee_fndecl (exp), exp)
7282 && !len1 && !len2)
7284 /* A strncmp read is constrained not just by the bound but
7285 also by the length of the shorter string. Specifying
7286 a bound that's larger than the size of either array makes
7287 no sense and is likely a bug. When the length of neither
7288 of the two strings is known but the sizes of both of
7289 the arrays they are stored in are, issue a warning if
7290 the bound is larger than the size of the larger
7291 of the two arrays. */
7293 access_ref ref1 (arg3, true);
7294 access_ref ref2 (arg3, true);
7296 tree bndrng[2] = { NULL_TREE, NULL_TREE };
7297 get_size_range (arg3, bndrng, ref1.bndrng);
7299 tree size1 = compute_objsize (arg1, 1, &ref1);
7300 tree size2 = compute_objsize (arg2, 1, &ref2);
7301 tree func = get_callee_fndecl (exp);
7303 if (size1 && size2 && bndrng[0] && !integer_zerop (bndrng[0]))
7305 offset_int rem1 = ref1.size_remaining ();
7306 offset_int rem2 = ref2.size_remaining ();
7307 if (rem1 == 0 || rem2 == 0)
7308 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7309 bndrng, integer_zero_node);
7310 else
7312 offset_int maxrem = wi::max (rem1, rem2, UNSIGNED);
7313 if (maxrem < wi::to_offset (bndrng[0]))
7314 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp,
7315 func, bndrng,
7316 wide_int_to_tree (sizetype, maxrem));
7319 else if (bndrng[0]
7320 && !integer_zerop (bndrng[0])
7321 && ((size1 && integer_zerop (size1))
7322 || (size2 && integer_zerop (size2))))
7323 maybe_warn_for_bound (OPT_Wstringop_overread, loc, exp, func,
7324 bndrng, integer_zero_node);
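/* Illustration, not part of GCC: the bound checks above diagnose calls
   such as

     char a[4], b[8];
     ...
     n = __builtin_strncmp (a, b, 16);   // bound exceeds both arrays

   Neither string length is known, but both array sizes are; since the
   larger remaining size (8) is smaller than the bound (16), a
   -Wstringop-overread warning is issued suggesting at most 8. */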
7328 /* Due to the performance benefit, always inline the calls first. */
7329 rtx result = NULL_RTX;
7330 result = inline_expand_builtin_bytecmp (exp, target);
7331 if (result)
7332 return result;
7334 /* If c_strlen can determine an expression for one of the string
7335 lengths, and it doesn't have side effects, then emit cmpstrnsi
7336 using length MIN(strlen(string)+1, arg3). */
7337 insn_code cmpstrn_icode = direct_optab_handler (cmpstrn_optab, SImode);
7338 if (cmpstrn_icode == CODE_FOR_nothing)
7339 return NULL_RTX;
7341 tree len;
7343 unsigned int arg1_align = get_pointer_alignment (arg1) / BITS_PER_UNIT;
7344 unsigned int arg2_align = get_pointer_alignment (arg2) / BITS_PER_UNIT;
7346 if (len1)
7347 len1 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len1);
7348 if (len2)
7349 len2 = size_binop_loc (loc, PLUS_EXPR, ssize_int (1), len2);
7351 tree len3 = fold_convert_loc (loc, sizetype, arg3);
7353 /* If we don't have a constant length for the first, use the length
7354 of the second, if we know it. If neither string is constant length,
7355 use the given length argument. We don't require a constant for
7356 this case; some cost analysis could be done if both are available
7357 but neither is constant. For now, assume they're equally cheap,
7358 unless one has side effects. If both strings have constant lengths,
7359 use the smaller. */
7361 if (!len1 && !len2)
7362 len = len3;
7363 else if (!len1)
7364 len = len2;
7365 else if (!len2)
7366 len = len1;
7367 else if (TREE_SIDE_EFFECTS (len1))
7368 len = len2;
7369 else if (TREE_SIDE_EFFECTS (len2))
7370 len = len1;
7371 else if (TREE_CODE (len1) != INTEGER_CST)
7372 len = len2;
7373 else if (TREE_CODE (len2) != INTEGER_CST)
7374 len = len1;
7375 else if (tree_int_cst_lt (len1, len2))
7376 len = len1;
7377 else
7378 len = len2;
7380 /* If we are not using the given length, we must incorporate it here.
7381 The actual new length parameter will be MIN(len,arg3) in this case. */
7382 if (len != len3)
7384 len = fold_convert_loc (loc, sizetype, len);
7385 len = fold_build2_loc (loc, MIN_EXPR, TREE_TYPE (len), len, len3);
7387 rtx arg1_rtx = get_memory_rtx (arg1, len);
7388 rtx arg2_rtx = get_memory_rtx (arg2, len);
7389 rtx arg3_rtx = expand_normal (len);
7390 result = expand_cmpstrn_or_cmpmem (cmpstrn_icode, target, arg1_rtx,
7391 arg2_rtx, TREE_TYPE (len), arg3_rtx,
7392 MIN (arg1_align, arg2_align));
7394 tree fndecl = get_callee_fndecl (exp);
7395 if (result)
7397 /* Return the value in the proper mode for this function. */
7398 mode = TYPE_MODE (TREE_TYPE (exp));
7399 if (GET_MODE (result) == mode)
7400 return result;
7401 if (target == 0)
7402 return convert_to_mode (mode, result, 0);
7403 convert_move (target, result, 0);
7404 return target;
7407 /* Expand the library call ourselves using a stabilized argument
7408 list to avoid re-evaluating the function's arguments twice. */
7409 tree call = build_call_nofold_loc (loc, fndecl, 3, arg1, arg2, len);
7410 if (TREE_NO_WARNING (exp))
7411 TREE_NO_WARNING (call) = true;
7412 gcc_assert (TREE_CODE (call) == CALL_EXPR);
7413 CALL_EXPR_TAILCALL (call) = CALL_EXPR_TAILCALL (exp);
7414 return expand_call (call, target, target == const0_rtx);
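/* Illustration, not part of GCC: for

     int cmp (const char *s)
     {
       return __builtin_strncmp (s, "hello", 32);
     }

   LEN2 becomes 5 + 1 == 6 while LEN3 is 32, so the expansion above
   compares MIN (6, 32) == 6 bytes; the literal's terminating nul makes
   reading any further unnecessary. */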
7417 /* Expand a call to __builtin_saveregs, generating the result in TARGET,
7418 if that's convenient. */
7420 rtx
7421 expand_builtin_saveregs (void)
7423 rtx val;
7424 rtx_insn *seq;
7426 /* Don't do __builtin_saveregs more than once in a function.
7427 Save the result of the first call and reuse it. */
7428 if (saveregs_value != 0)
7429 return saveregs_value;
7431 /* When this function is called, it means that registers must be
7432 saved on entry to this function. So we migrate the call to the
7433 first insn of this function. */
7435 start_sequence ();
7437 /* Do whatever the machine needs done in this case. */
7438 val = targetm.calls.expand_builtin_saveregs ();
7440 seq = get_insns ();
7441 end_sequence ();
7443 saveregs_value = val;
7445 /* Put the insns after the NOTE that starts the function. If this
7446 is inside a start_sequence, make the outer-level insn chain current, so
7447 the code is placed at the start of the function. */
7448 push_topmost_sequence ();
7449 emit_insn_after (seq, entry_of_function ());
7450 pop_topmost_sequence ();
7452 return val;
7455 /* Expand a call to __builtin_next_arg. */
7457 static rtx
7458 expand_builtin_next_arg (void)
7460 /* Checking arguments is already done in fold_builtin_next_arg
7461 that must be called before this function. */
7462 return expand_binop (ptr_mode, add_optab,
7463 crtl->args.internal_arg_pointer,
7464 crtl->args.arg_offset_rtx,
7465 NULL_RTX, 0, OPTAB_LIB_WIDEN);
7468 /* Make it easier for the backends by protecting the valist argument
7469 from multiple evaluations. */
7471 static tree
7472 stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
7474 tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
7476 /* The current way of determining the type of valist is completely
7477 bogus. We should have the information on the va builtin instead. */
7478 if (!vatype)
7479 vatype = targetm.fn_abi_va_list (cfun->decl);
7481 if (TREE_CODE (vatype) == ARRAY_TYPE)
7483 if (TREE_SIDE_EFFECTS (valist))
7484 valist = save_expr (valist);
7486 /* For this case, the backends will be expecting a pointer to
7487 vatype, but it's possible we've actually been given an array
7488 (an actual TARGET_CANONICAL_VA_LIST_TYPE (valist)).
7489 So fix it. */
7490 if (TREE_CODE (TREE_TYPE (valist)) == ARRAY_TYPE)
7492 tree p1 = build_pointer_type (TREE_TYPE (vatype));
7493 valist = build_fold_addr_expr_with_type_loc (loc, valist, p1);
7496 else
7498 tree pt = build_pointer_type (vatype);
7500 if (! needs_lvalue)
7502 if (! TREE_SIDE_EFFECTS (valist))
7503 return valist;
7505 valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
7506 TREE_SIDE_EFFECTS (valist) = 1;
7509 if (TREE_SIDE_EFFECTS (valist))
7510 valist = save_expr (valist);
7511 valist = fold_build2_loc (loc, MEM_REF,
7512 vatype, valist, build_int_cst (pt, 0));
7515 return valist;
7518 /* The "standard" definition of va_list is void*. */
7520 tree
7521 std_build_builtin_va_list (void)
7523 return ptr_type_node;
7526 /* The "standard" abi va_list is va_list_type_node. */
7528 tree
7529 std_fn_abi_va_list (tree fndecl ATTRIBUTE_UNUSED)
7531 return va_list_type_node;
7534 /* The "standard" type of va_list is va_list_type_node. */
7536 tree
7537 std_canonical_va_list_type (tree type)
7539 tree wtype, htype;
7541 wtype = va_list_type_node;
7542 htype = type;
7544 if (TREE_CODE (wtype) == ARRAY_TYPE)
7546 /* If va_list is an array type, the argument may have decayed
7547 to a pointer type, e.g. by being passed to another function.
7548 In that case, unwrap both types so that we can compare the
7549 underlying records. */
7550 if (TREE_CODE (htype) == ARRAY_TYPE
7551 || POINTER_TYPE_P (htype))
7553 wtype = TREE_TYPE (wtype);
7554 htype = TREE_TYPE (htype);
7557 if (TYPE_MAIN_VARIANT (wtype) == TYPE_MAIN_VARIANT (htype))
7558 return va_list_type_node;
7560 return NULL_TREE;
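/* Illustration, not part of GCC: on targets where va_list is an array
   type, e.g.

     typedef struct __va_list_tag va_list[1];   // x86-64 style

   a va_list parameter decays to a pointer when passed to another
   function, which is why the code above unwraps both ARRAY_TYPE and
   POINTER_TYPE before comparing the underlying record types. */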
7563 /* The "standard" implementation of va_start: just assign `nextarg' to
7564 the variable. */
7566 void
7567 std_expand_builtin_va_start (tree valist, rtx nextarg)
7569 rtx va_r = expand_expr (valist, NULL_RTX, VOIDmode, EXPAND_WRITE);
7570 convert_move (va_r, nextarg, 0);
7573 /* Expand EXP, a call to __builtin_va_start. */
7575 static rtx
7576 expand_builtin_va_start (tree exp)
7578 rtx nextarg;
7579 tree valist;
7580 location_t loc = EXPR_LOCATION (exp);
7582 if (call_expr_nargs (exp) < 2)
7584 error_at (loc, "too few arguments to function %<va_start%>");
7585 return const0_rtx;
7588 if (fold_builtin_next_arg (exp, true))
7589 return const0_rtx;
7591 nextarg = expand_builtin_next_arg ();
7592 valist = stabilize_va_list_loc (loc, CALL_EXPR_ARG (exp, 0), 1);
7594 if (targetm.expand_builtin_va_start)
7595 targetm.expand_builtin_va_start (valist, nextarg);
7596 else
7597 std_expand_builtin_va_start (valist, nextarg);
7599 return const0_rtx;
7602 /* Expand EXP, a call to __builtin_va_end. */
7604 static rtx
7605 expand_builtin_va_end (tree exp)
7607 tree valist = CALL_EXPR_ARG (exp, 0);
7609 /* Evaluate for side effects, if needed. I hate macros that don't
7610 do that. */
7611 if (TREE_SIDE_EFFECTS (valist))
7612 expand_expr (valist, const0_rtx, VOIDmode, EXPAND_NORMAL);
7614 return const0_rtx;
7617 /* Expand EXP, a call to __builtin_va_copy. We do this as a
7618 builtin rather than just as an assignment in stdarg.h because of the
7619 nastiness of array-type va_list types. */
7621 static rtx
7622 expand_builtin_va_copy (tree exp)
7624 tree dst, src, t;
7625 location_t loc = EXPR_LOCATION (exp);
7627 dst = CALL_EXPR_ARG (exp, 0);
7628 src = CALL_EXPR_ARG (exp, 1);
7630 dst = stabilize_va_list_loc (loc, dst, 1);
7631 src = stabilize_va_list_loc (loc, src, 0);
7633 gcc_assert (cfun != NULL && cfun->decl != NULL_TREE);
7635 if (TREE_CODE (targetm.fn_abi_va_list (cfun->decl)) != ARRAY_TYPE)
7637 t = build2 (MODIFY_EXPR, targetm.fn_abi_va_list (cfun->decl), dst, src);
7638 TREE_SIDE_EFFECTS (t) = 1;
7639 expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
7641 else
7643 rtx dstb, srcb, size;
7645 /* Evaluate to pointers. */
7646 dstb = expand_expr (dst, NULL_RTX, Pmode, EXPAND_NORMAL);
7647 srcb = expand_expr (src, NULL_RTX, Pmode, EXPAND_NORMAL);
7648 size = expand_expr (TYPE_SIZE_UNIT (targetm.fn_abi_va_list (cfun->decl)),
7649 NULL_RTX, VOIDmode, EXPAND_NORMAL);
7651 dstb = convert_memory_address (Pmode, dstb);
7652 srcb = convert_memory_address (Pmode, srcb);
7654 /* "Dereference" to BLKmode memories. */
7655 dstb = gen_rtx_MEM (BLKmode, dstb);
7656 set_mem_alias_set (dstb, get_alias_set (TREE_TYPE (TREE_TYPE (dst))));
7657 set_mem_align (dstb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7658 srcb = gen_rtx_MEM (BLKmode, srcb);
7659 set_mem_alias_set (srcb, get_alias_set (TREE_TYPE (TREE_TYPE (src))));
7660 set_mem_align (srcb, TYPE_ALIGN (targetm.fn_abi_va_list (cfun->decl)));
7662 /* Copy. */
7663 emit_block_move (dstb, srcb, size, BLOCK_OP_NORMAL);
7666 return const0_rtx;
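/* Illustration, not part of GCC: why va_copy must be a builtin. With
   an array-type va_list, plain assignment is ill-formed in C, so the
   expansion above falls back to a block copy of the whole record:

     va_list a, b;
     va_start (a, last);
     __builtin_va_copy (b, a);   // emit_block_move when va_list is an array
     ...
     va_end (b);
     va_end (a);  */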
7669 /* Expand a call to one of the builtin functions __builtin_frame_address or
7670 __builtin_return_address. */
7672 static rtx
7673 expand_builtin_frame_address (tree fndecl, tree exp)
7675 /* The argument must be a nonnegative integer constant.
7676 It counts the number of frames to scan up the stack.
7677 The value is either the frame pointer value or the return
7678 address saved in that frame. */
7679 if (call_expr_nargs (exp) == 0)
7680 /* Warning about missing arg was already issued. */
7681 return const0_rtx;
7682 else if (! tree_fits_uhwi_p (CALL_EXPR_ARG (exp, 0)))
7684 error ("invalid argument to %qD", fndecl);
7685 return const0_rtx;
7687 else
7689 /* Number of frames to scan up the stack. */
7690 unsigned HOST_WIDE_INT count = tree_to_uhwi (CALL_EXPR_ARG (exp, 0));
7692 rtx tem = expand_builtin_return_addr (DECL_FUNCTION_CODE (fndecl), count);
7694 /* Some ports cannot access arbitrary stack frames. */
7695 if (tem == NULL)
7697 warning (0, "unsupported argument to %qD", fndecl);
7698 return const0_rtx;
7701 if (count)
7703 /* Warn since no effort is made to ensure that any frame
7704 beyond the current one exists or can be safely reached. */
7705 warning (OPT_Wframe_address, "calling %qD with "
7706 "a nonzero argument is unsafe", fndecl);
7709 /* For __builtin_frame_address, return what we've got. */
7710 if (DECL_FUNCTION_CODE (fndecl) == BUILT_IN_FRAME_ADDRESS)
7711 return tem;
7713 if (!REG_P (tem)
7714 && ! CONSTANT_P (tem))
7715 tem = copy_addr_to_reg (tem);
7716 return tem;
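/* Illustration, not part of GCC: the argument must be a literal, and
   nonzero frame counts are flagged:

     void *fp = __builtin_frame_address (0);     // OK
     void *ra = __builtin_return_address (1);    // -Wframe-address: unsafe  */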
7720 /* Expand EXP, a call to the alloca builtin. Return NULL_RTX if we
7721 failed and the caller should emit a normal call. */
7723 static rtx
7724 expand_builtin_alloca (tree exp)
7726 rtx op0;
7727 rtx result;
7728 unsigned int align;
7729 tree fndecl = get_callee_fndecl (exp);
7730 HOST_WIDE_INT max_size;
7731 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
7732 bool alloca_for_var = CALL_ALLOCA_FOR_VAR_P (exp);
7733 bool valid_arglist
7734 = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7735 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, INTEGER_TYPE,
7736 VOID_TYPE)
7737 : fcode == BUILT_IN_ALLOCA_WITH_ALIGN
7738 ? validate_arglist (exp, INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE)
7739 : validate_arglist (exp, INTEGER_TYPE, VOID_TYPE));
7741 if (!valid_arglist)
7742 return NULL_RTX;
7744 if ((alloca_for_var
7745 && warn_vla_limit >= HOST_WIDE_INT_MAX
7746 && warn_alloc_size_limit < warn_vla_limit)
7747 || (!alloca_for_var
7748 && warn_alloca_limit >= HOST_WIDE_INT_MAX
7749 && warn_alloc_size_limit < warn_alloca_limit
7752 /* -Walloca-larger-than and -Wvla-larger-than settings of
7753 less than HOST_WIDE_INT_MAX override the more general
7754 -Walloc-size-larger-than so unless either of the former
7755 options is smaller than the last one (which would imply
7756 that the call was already checked), check the alloca
7757 arguments for overflow. */
7758 tree args[] = { CALL_EXPR_ARG (exp, 0), NULL_TREE };
7759 int idx[] = { 0, -1 };
7760 maybe_warn_alloc_args_overflow (fndecl, exp, args, idx);
7763 /* Compute the argument. */
7764 op0 = expand_normal (CALL_EXPR_ARG (exp, 0));
7766 /* Compute the alignment. */
7767 align = (fcode == BUILT_IN_ALLOCA
7768 ? BIGGEST_ALIGNMENT
7769 : TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 1)));
7771 /* Compute the maximum size. */
7772 max_size = (fcode == BUILT_IN_ALLOCA_WITH_ALIGN_AND_MAX
7773 ? TREE_INT_CST_LOW (CALL_EXPR_ARG (exp, 2))
7774 : -1);
7776 /* Allocate the desired space. If the allocation stems from the declaration
7777 of a variable-sized object, it cannot accumulate. */
7778 result
7779 = allocate_dynamic_stack_space (op0, 0, align, max_size, alloca_for_var);
7780 result = convert_memory_address (ptr_mode, result);
7782 /* Dynamic allocations for variables are recorded during gimplification. */
7783 if (!alloca_for_var && (flag_callgraph_info & CALLGRAPH_INFO_DYNAMIC_ALLOC))
7784 record_dynamic_alloc (exp);
7786 return result;
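/* Illustration, not part of GCC: the three argument shapes validated
   above correspond to

     p = __builtin_alloca (n);                            // default alignment
     p = __builtin_alloca_with_align (n, 64);             // alignment in bits
     p = __builtin_alloca_with_align_and_max (n, 64, 4096);

   where the alignment and maximum-size arguments must be integer
   constants, as the TREE_INT_CST_LOW accesses above require. */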
7789 /* Emit a call to __asan_allocas_unpoison in EXP. Add to its second
7790 argument virtual_stack_dynamic_rtx - stack_pointer_rtx, which is the
7791 STACK_DYNAMIC_OFFSET value. See the motivation for this in the comment
7792 for the handle_builtin_stack_restore function. */
7794 static rtx
7795 expand_asan_emit_allocas_unpoison (tree exp)
7797 tree arg0 = CALL_EXPR_ARG (exp, 0);
7798 tree arg1 = CALL_EXPR_ARG (exp, 1);
7799 rtx top = expand_expr (arg0, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7800 rtx bot = expand_expr (arg1, NULL_RTX, ptr_mode, EXPAND_NORMAL);
7801 rtx off = expand_simple_binop (Pmode, MINUS, virtual_stack_dynamic_rtx,
7802 stack_pointer_rtx, NULL_RTX, 0,
7803 OPTAB_LIB_WIDEN);
7804 off = convert_modes (ptr_mode, Pmode, off, 0);
7805 bot = expand_simple_binop (ptr_mode, PLUS, bot, off, NULL_RTX, 0,
7806 OPTAB_LIB_WIDEN);
7807 rtx ret = init_one_libfunc ("__asan_allocas_unpoison");
7808 ret = emit_library_call_value (ret, NULL_RTX, LCT_NORMAL, ptr_mode,
7809 top, ptr_mode, bot, ptr_mode);
7810 return ret;
7813 /* Expand a call to bswap builtin in EXP.
7814 Return NULL_RTX if a normal call should be emitted rather than expanding the
7815 function in-line. If convenient, the result should be placed in TARGET.
7816 SUBTARGET may be used as the target for computing one of EXP's operands. */
7818 static rtx
7819 expand_builtin_bswap (machine_mode target_mode, tree exp, rtx target,
7820 rtx subtarget)
7822 tree arg;
7823 rtx op0;
7825 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7826 return NULL_RTX;
7828 arg = CALL_EXPR_ARG (exp, 0);
7829 op0 = expand_expr (arg,
7830 subtarget && GET_MODE (subtarget) == target_mode
7831 ? subtarget : NULL_RTX,
7832 target_mode, EXPAND_NORMAL);
7833 if (GET_MODE (op0) != target_mode)
7834 op0 = convert_to_mode (target_mode, op0, 1);
7836 target = expand_unop (target_mode, bswap_optab, op0, target, 1);
7838 gcc_assert (target);
7840 return convert_to_mode (target_mode, target, 1);
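/* Illustration, not part of GCC:

     unsigned int u = __builtin_bswap32 (0x12345678);   // 0x78563412

   This expands to a single bswap instruction when bswap_optab has a
   handler for SImode; otherwise expand_unop synthesizes the reversal. */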
7843 /* Expand a call to a unary builtin in EXP.
7844 Return NULL_RTX if a normal call should be emitted rather than expanding the
7845 function in-line. If convenient, the result should be placed in TARGET.
7846 SUBTARGET may be used as the target for computing one of EXP's operands. */
7848 static rtx
7849 expand_builtin_unop (machine_mode target_mode, tree exp, rtx target,
7850 rtx subtarget, optab op_optab)
7852 rtx op0;
7854 if (!validate_arglist (exp, INTEGER_TYPE, VOID_TYPE))
7855 return NULL_RTX;
7857 /* Compute the argument. */
7858 op0 = expand_expr (CALL_EXPR_ARG (exp, 0),
7859 (subtarget
7860 && (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0)))
7861 == GET_MODE (subtarget))) ? subtarget : NULL_RTX,
7862 VOIDmode, EXPAND_NORMAL);
7863 /* Compute op, into TARGET if possible.
7864 Set TARGET to wherever the result comes back. */
7865 target = expand_unop (TYPE_MODE (TREE_TYPE (CALL_EXPR_ARG (exp, 0))),
7866 op_optab, op0, target, op_optab != clrsb_optab);
7867 gcc_assert (target);
7869 return convert_to_mode (target_mode, target, 0);
7872 /* Expand a call to __builtin_expect. We just return our argument
7873 as the builtin_expect semantic should've been already executed by
7874 the tree branch prediction pass. */
7876 static rtx
7877 expand_builtin_expect (tree exp, rtx target)
7879 tree arg;
7881 if (call_expr_nargs (exp) < 2)
7882 return const0_rtx;
7883 arg = CALL_EXPR_ARG (exp, 0);
7885 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7886 /* When guessing was done, the hints should be already stripped away. */
7887 gcc_assert (!flag_guess_branch_prob
7888 || optimize == 0 || seen_error ());
7889 return target;
7892 /* Expand a call to __builtin_expect_with_probability. We just return our
7893 argument as the builtin_expect semantic should've been already executed by
7894 the tree branch prediction pass. */
7896 static rtx
7897 expand_builtin_expect_with_probability (tree exp, rtx target)
7899 tree arg;
7901 if (call_expr_nargs (exp) < 3)
7902 return const0_rtx;
7903 arg = CALL_EXPR_ARG (exp, 0);
7905 target = expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
7906 /* When guessing was done, the hints should be already stripped away. */
7907 gcc_assert (!flag_guess_branch_prob
7908 || optimize == 0 || seen_error ());
7909 return target;
7913 /* Expand a call to __builtin_assume_aligned. We just return our first
7914 argument as the builtin_assume_aligned semantic should've been already
7915 executed by CCP. */
7917 static rtx
7918 expand_builtin_assume_aligned (tree exp, rtx target)
7920 if (call_expr_nargs (exp) < 2)
7921 return const0_rtx;
7922 target = expand_expr (CALL_EXPR_ARG (exp, 0), target, VOIDmode,
7923 EXPAND_NORMAL);
7924 gcc_assert (!TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 1))
7925 && (call_expr_nargs (exp) < 3
7926 || !TREE_SIDE_EFFECTS (CALL_EXPR_ARG (exp, 2))));
7927 return target;
7930 void
7931 expand_builtin_trap (void)
7933 if (targetm.have_trap ())
7935 rtx_insn *insn = emit_insn (targetm.gen_trap ());
7936 /* For trap insns when not accumulating outgoing args force
7937 REG_ARGS_SIZE note to prevent crossjumping of calls with
7938 different args sizes. */
7939 if (!ACCUMULATE_OUTGOING_ARGS)
7940 add_args_size_note (insn, stack_pointer_delta);
7942 else
7944 tree fn = builtin_decl_implicit (BUILT_IN_ABORT);
7945 tree call_expr = build_call_expr (fn, 0);
7946 expand_call (call_expr, NULL_RTX, false);
7949 emit_barrier ();
7952 /* Expand a call to __builtin_unreachable. We do nothing except emit
7953 a barrier saying that control flow will not pass here.
7955 It is the responsibility of the program being compiled to ensure
7956 that control flow never reaches __builtin_unreachable. */
7957 static void
7958 expand_builtin_unreachable (void)
7960 emit_barrier ();
7963 /* Expand EXP, a call to fabs, fabsf or fabsl.
7964 Return NULL_RTX if a normal call should be emitted rather than expanding
7965 the function inline. If convenient, the result should be placed
7966 in TARGET. SUBTARGET may be used as the target for computing
7967 the operand. */
7969 static rtx
7970 expand_builtin_fabs (tree exp, rtx target, rtx subtarget)
7972 machine_mode mode;
7973 tree arg;
7974 rtx op0;
7976 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
7977 return NULL_RTX;
7979 arg = CALL_EXPR_ARG (exp, 0);
7980 CALL_EXPR_ARG (exp, 0) = arg = builtin_save_expr (arg);
7981 mode = TYPE_MODE (TREE_TYPE (arg));
7982 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
7983 return expand_abs (mode, op0, target, 0, safe_from_p (target, arg, 1));
7986 /* Expand EXP, a call to copysign, copysignf, or copysignl.
7987 Return NULL if a normal call should be emitted rather than expanding the
7988 function inline. If convenient, the result should be placed in TARGET.
7989 SUBTARGET may be used as the target for computing the operand. */
7991 static rtx
7992 expand_builtin_copysign (tree exp, rtx target, rtx subtarget)
7994 rtx op0, op1;
7995 tree arg;
7997 if (!validate_arglist (exp, REAL_TYPE, REAL_TYPE, VOID_TYPE))
7998 return NULL_RTX;
8000 arg = CALL_EXPR_ARG (exp, 0);
8001 op0 = expand_expr (arg, subtarget, VOIDmode, EXPAND_NORMAL);
8003 arg = CALL_EXPR_ARG (exp, 1);
8004 op1 = expand_normal (arg);
8006 return expand_copysign (op0, op1, target);
8009 /* Emit a call to __builtin___clear_cache. */
8011 void
8012 default_emit_call_builtin___clear_cache (rtx begin, rtx end)
8014 rtx callee = gen_rtx_SYMBOL_REF (Pmode,
8015 BUILTIN_ASM_NAME_PTR
8016 (BUILT_IN_CLEAR_CACHE));
8018 emit_library_call (callee,
8019 LCT_NORMAL, VOIDmode,
8020 convert_memory_address (ptr_mode, begin), ptr_mode,
8021 convert_memory_address (ptr_mode, end), ptr_mode);
8024 /* Emit a call to __builtin___clear_cache, unless the target specifies
8025 it as do-nothing. This function can be used by trampoline
8026 finalizers to duplicate the effects of expanding a call to the
8027 clear_cache builtin. */
8029 void
8030 maybe_emit_call_builtin___clear_cache (rtx begin, rtx end)
8032 if ((GET_MODE (begin) != ptr_mode && GET_MODE (begin) != Pmode)
8033 || (GET_MODE (end) != ptr_mode && GET_MODE (end) != Pmode))
8035 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
8036 return;
8039 if (targetm.have_clear_cache ())
8041 /* We have a "clear_cache" insn, and it will handle everything. */
8042 class expand_operand ops[2];
8044 create_address_operand (&ops[0], begin);
8045 create_address_operand (&ops[1], end);
8047 if (maybe_expand_insn (targetm.code_for_clear_cache, 2, ops))
8048 return;
8050 else
8052 #ifndef CLEAR_INSN_CACHE
8053 /* There is no "clear_cache" insn, and __clear_cache() in libgcc
8054 does nothing. There is no need to call it. Do nothing. */
8055 return;
8056 #endif /* CLEAR_INSN_CACHE */
8059 targetm.calls.emit_call_builtin___clear_cache (begin, end);
8062 /* Expand a call to __builtin___clear_cache. */
8064 static void
8065 expand_builtin___clear_cache (tree exp)
8067 tree begin, end;
8068 rtx begin_rtx, end_rtx;
8070 /* We must not expand to a library call. If we did, any
8071 fallback library function in libgcc that might contain a call to
8072 __builtin___clear_cache() would recurse infinitely. */
8073 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
8075 error ("both arguments to %<__builtin___clear_cache%> must be pointers");
8076 return;
8079 begin = CALL_EXPR_ARG (exp, 0);
8080 begin_rtx = expand_expr (begin, NULL_RTX, Pmode, EXPAND_NORMAL);
8082 end = CALL_EXPR_ARG (exp, 1);
8083 end_rtx = expand_expr (end, NULL_RTX, Pmode, EXPAND_NORMAL);
8085 maybe_emit_call_builtin___clear_cache (begin_rtx, end_rtx);
8088 /* Given a trampoline address, make sure it satisfies TRAMPOLINE_ALIGNMENT. */
8090 static rtx
8091 round_trampoline_addr (rtx tramp)
8093 rtx temp, addend, mask;
8095 /* If we don't need too much alignment, we'll have been guaranteed
8096 proper alignment by get_trampoline_type. */
8097 if (TRAMPOLINE_ALIGNMENT <= STACK_BOUNDARY)
8098 return tramp;
8100 /* Round address up to desired boundary. */
8101 temp = gen_reg_rtx (Pmode);
8102 addend = gen_int_mode (TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT - 1, Pmode);
8103 mask = gen_int_mode (-TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT, Pmode);
8105 temp = expand_simple_binop (Pmode, PLUS, tramp, addend,
8106 temp, 0, OPTAB_LIB_WIDEN);
8107 tramp = expand_simple_binop (Pmode, AND, temp, mask,
8108 temp, 0, OPTAB_LIB_WIDEN);
8110 return tramp;
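/* Illustration, not part of GCC: the sequence above is the usual
   add-then-mask rounding,

     rounded = (tramp + align - 1) & -align;

   with align = TRAMPOLINE_ALIGNMENT / BITS_PER_UNIT; e.g. for
   align == 16, an address of 0x1001 rounds up to 0x1010. */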
8113 static rtx
8114 expand_builtin_init_trampoline (tree exp, bool onstack)
8116 tree t_tramp, t_func, t_chain;
8117 rtx m_tramp, r_tramp, r_chain, tmp;
8119 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE,
8120 POINTER_TYPE, VOID_TYPE))
8121 return NULL_RTX;
8123 t_tramp = CALL_EXPR_ARG (exp, 0);
8124 t_func = CALL_EXPR_ARG (exp, 1);
8125 t_chain = CALL_EXPR_ARG (exp, 2);
8127 r_tramp = expand_normal (t_tramp);
8128 m_tramp = gen_rtx_MEM (BLKmode, r_tramp);
8129 MEM_NOTRAP_P (m_tramp) = 1;
8131 /* If ONSTACK, the TRAMP argument should be the address of a field
8132 within the local function's FRAME decl. Either way, let's see if
8133 we can fill in the MEM_ATTRs for this memory. */
8134 if (TREE_CODE (t_tramp) == ADDR_EXPR)
8135 set_mem_attributes (m_tramp, TREE_OPERAND (t_tramp, 0), true);
8137 /* Creator of a heap trampoline is responsible for making sure the
8138 address is aligned to at least STACK_BOUNDARY. Normally malloc
8139 will ensure this anyhow. */
8140 tmp = round_trampoline_addr (r_tramp);
8141 if (tmp != r_tramp)
8143 m_tramp = change_address (m_tramp, BLKmode, tmp);
8144 set_mem_align (m_tramp, TRAMPOLINE_ALIGNMENT);
8145 set_mem_size (m_tramp, TRAMPOLINE_SIZE);
8148 /* The FUNC argument should be the address of the nested function.
8149 Extract the actual function decl to pass to the hook. */
8150 gcc_assert (TREE_CODE (t_func) == ADDR_EXPR);
8151 t_func = TREE_OPERAND (t_func, 0);
8152 gcc_assert (TREE_CODE (t_func) == FUNCTION_DECL);
8154 r_chain = expand_normal (t_chain);
8156 /* Generate insns to initialize the trampoline. */
8157 targetm.calls.trampoline_init (m_tramp, t_func, r_chain);
8159 if (onstack)
8161 trampolines_created = 1;
8163 if (targetm.calls.custom_function_descriptors != 0)
8164 warning_at (DECL_SOURCE_LOCATION (t_func), OPT_Wtrampolines,
8165 "trampoline generated for nested function %qD", t_func);
8168 return const0_rtx;
8171 static rtx
8172 expand_builtin_adjust_trampoline (tree exp)
8174 rtx tramp;
8176 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8177 return NULL_RTX;
8179 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
8180 tramp = round_trampoline_addr (tramp);
8181 if (targetm.calls.trampoline_adjust_address)
8182 tramp = targetm.calls.trampoline_adjust_address (tramp);
8184 return tramp;
8187 /* Expand a call to the builtin descriptor initialization routine.
8188 A descriptor is made up of a pair of pointers: the static
8189 chain and the code entry, in that order.
8191 static rtx
8192 expand_builtin_init_descriptor (tree exp)
8194 tree t_descr, t_func, t_chain;
8195 rtx m_descr, r_descr, r_func, r_chain;
8197 if (!validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, POINTER_TYPE,
8198 VOID_TYPE))
8199 return NULL_RTX;
8201 t_descr = CALL_EXPR_ARG (exp, 0);
8202 t_func = CALL_EXPR_ARG (exp, 1);
8203 t_chain = CALL_EXPR_ARG (exp, 2);
8205 r_descr = expand_normal (t_descr);
8206 m_descr = gen_rtx_MEM (BLKmode, r_descr);
8207 MEM_NOTRAP_P (m_descr) = 1;
8208 set_mem_align (m_descr, GET_MODE_ALIGNMENT (ptr_mode));
8210 r_func = expand_normal (t_func);
8211 r_chain = expand_normal (t_chain);
8213 /* Generate insns to initialize the descriptor. */
8214 emit_move_insn (adjust_address_nv (m_descr, ptr_mode, 0), r_chain);
8215 emit_move_insn (adjust_address_nv (m_descr, ptr_mode,
8216 POINTER_SIZE / BITS_PER_UNIT), r_func);
8218 return const0_rtx;
8221 /* Expand a call to the builtin descriptor adjustment routine. */
8223 static rtx
8224 expand_builtin_adjust_descriptor (tree exp)
8226 rtx tramp;
8228 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
8229 return NULL_RTX;
8231 tramp = expand_normal (CALL_EXPR_ARG (exp, 0));
8233 /* Unalign the descriptor to allow runtime identification. */
8234 tramp = plus_constant (ptr_mode, tramp,
8235 targetm.calls.custom_function_descriptors);
8237 return force_operand (tramp, NULL_RTX);
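/* Illustration, not part of GCC: a descriptor is laid out as two
   consecutive pointers,

     struct descr { void *chain; void *entry; };   // hypothetical view

   and the constant offset added above deliberately misaligns the
   resulting "function pointer", which is how an indirect call site can
   tell descriptors from ordinary, aligned code addresses at run time. */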
8240 /* Expand the call EXP to the built-in signbit, signbitf or signbitl
8241 function. The function first checks whether the back end provides
8242 an insn to implement signbit for the respective mode. If not, it
8243 checks whether the floating point format of the value is such that
8244 the sign bit can be extracted. Formats without a sign bit are handled as ARG < 0.0.
8245 EXP is the expression that is a call to the builtin function; if
8246 convenient, the result should be placed in TARGET. */
8247 static rtx
8248 expand_builtin_signbit (tree exp, rtx target)
8250 const struct real_format *fmt;
8251 scalar_float_mode fmode;
8252 scalar_int_mode rmode, imode;
8253 tree arg;
8254 int word, bitpos;
8255 enum insn_code icode;
8256 rtx temp;
8257 location_t loc = EXPR_LOCATION (exp);
8259 if (!validate_arglist (exp, REAL_TYPE, VOID_TYPE))
8260 return NULL_RTX;
8262 arg = CALL_EXPR_ARG (exp, 0);
8263 fmode = SCALAR_FLOAT_TYPE_MODE (TREE_TYPE (arg));
8264 rmode = SCALAR_INT_TYPE_MODE (TREE_TYPE (exp));
8265 fmt = REAL_MODE_FORMAT (fmode);
8267 arg = builtin_save_expr (arg);
8269 /* Expand the argument yielding a RTX expression. */
8270 temp = expand_normal (arg);
8272 /* Check if the back end provides an insn that handles signbit for the
8273 argument's mode. */
8274 icode = optab_handler (signbit_optab, fmode);
8275 if (icode != CODE_FOR_nothing)
8277 rtx_insn *last = get_last_insn ();
8278 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
8279 if (maybe_emit_unop_insn (icode, target, temp, UNKNOWN))
8280 return target;
8281 delete_insns_since (last);
8284 /* For floating point formats without a sign bit, implement signbit
8285 as "ARG < 0.0". */
8286 bitpos = fmt->signbit_ro;
8287 if (bitpos < 0)
8289 /* But we can't do this if the format supports signed zero. */
8290 gcc_assert (!fmt->has_signed_zero || !HONOR_SIGNED_ZEROS (fmode));
8292 arg = fold_build2_loc (loc, LT_EXPR, TREE_TYPE (exp), arg,
8293 build_real (TREE_TYPE (arg), dconst0));
8294 return expand_expr (arg, target, VOIDmode, EXPAND_NORMAL);
8297 if (GET_MODE_SIZE (fmode) <= UNITS_PER_WORD)
8299 imode = int_mode_for_mode (fmode).require ();
8300 temp = gen_lowpart (imode, temp);
8302 else
8304 imode = word_mode;
8305 /* Handle targets with different FP word orders. */
8306 if (FLOAT_WORDS_BIG_ENDIAN)
8307 word = (GET_MODE_BITSIZE (fmode) - bitpos) / BITS_PER_WORD;
8308 else
8309 word = bitpos / BITS_PER_WORD;
8310 temp = operand_subword_force (temp, word, fmode);
8311 bitpos = bitpos % BITS_PER_WORD;
8314 /* Force the intermediate word_mode (or narrower) result into a
8315 register. This avoids attempting to create paradoxical SUBREGs
8316 of floating point modes below. */
8317 temp = force_reg (imode, temp);
8319 /* If the bitpos is within the "result mode" lowpart, the operation
8320 can be implemented with a single bitwise AND. Otherwise, we need
8321 a right shift and an AND. */
8323 if (bitpos < GET_MODE_BITSIZE (rmode))
8325 wide_int mask = wi::set_bit_in_zero (bitpos, GET_MODE_PRECISION (rmode));
8327 if (GET_MODE_SIZE (imode) > GET_MODE_SIZE (rmode))
8328 temp = gen_lowpart (rmode, temp);
8329 temp = expand_binop (rmode, and_optab, temp,
8330 immed_wide_int_const (mask, rmode),
8331 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8333 else
8335 /* Perform a logical right shift to place the signbit in the least
8336 significant bit, then truncate the result to the desired mode
8337 and mask just this bit. */
8338 temp = expand_shift (RSHIFT_EXPR, imode, temp, bitpos, NULL_RTX, 1);
8339 temp = gen_lowpart (rmode, temp);
8340 temp = expand_binop (rmode, and_optab, temp, const1_rtx,
8341 NULL_RTX, 1, OPTAB_LIB_WIDEN);
8344 return temp;
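/* Illustration, not part of GCC: for IEEE double with a 32-bit int
   result, BITPOS is 63, outside the int lowpart, so the shift-and-AND
   path above is used. In effect (type pun for exposition only):

     int sb (double d)
     {
       unsigned long long u;
       __builtin_memcpy (&u, &d, sizeof u);
       return (u >> 63) & 1;   // logical shift, then mask bit 0
     }  */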
8347 /* Expand fork or exec calls. TARGET is the desired target of the
8348 call. EXP is the call. FN is the
8349 identifier of the actual function. IGNORE is nonzero if the
8350 value is to be ignored. */
8352 static rtx
8353 expand_builtin_fork_or_exec (tree fn, tree exp, rtx target, int ignore)
8355 tree id, decl;
8356 tree call;
8358 if (DECL_FUNCTION_CODE (fn) != BUILT_IN_FORK)
8360 tree path = CALL_EXPR_ARG (exp, 0);
8361 /* Detect unterminated path. */
8362 if (!check_read_access (exp, path))
8363 return NULL_RTX;
8365 /* Also detect unterminated first argument. */
8366 switch (DECL_FUNCTION_CODE (fn))
8368 case BUILT_IN_EXECL:
8369 case BUILT_IN_EXECLE:
8370 case BUILT_IN_EXECLP:
8371 if (!check_read_access (exp, path))
8372 return NULL_RTX;
8373 default:
8374 break;
8379 /* If we are not profiling, just call the function. */
8380 if (!profile_arc_flag)
8381 return NULL_RTX;
8383 /* Otherwise call the wrapper. This should be equivalent for the rest of
8384 the compiler, so the code does not diverge, and the wrapper may run the
8385 code necessary for keeping the profiling sane. */
8387 switch (DECL_FUNCTION_CODE (fn))
8389 case BUILT_IN_FORK:
8390 id = get_identifier ("__gcov_fork");
8391 break;
8393 case BUILT_IN_EXECL:
8394 id = get_identifier ("__gcov_execl");
8395 break;
8397 case BUILT_IN_EXECV:
8398 id = get_identifier ("__gcov_execv");
8399 break;
8401 case BUILT_IN_EXECLP:
8402 id = get_identifier ("__gcov_execlp");
8403 break;
8405 case BUILT_IN_EXECLE:
8406 id = get_identifier ("__gcov_execle");
8407 break;
8409 case BUILT_IN_EXECVP:
8410 id = get_identifier ("__gcov_execvp");
8411 break;
8413 case BUILT_IN_EXECVE:
8414 id = get_identifier ("__gcov_execve");
8415 break;
8417 default:
8418 gcc_unreachable ();
8421 decl = build_decl (DECL_SOURCE_LOCATION (fn),
8422 FUNCTION_DECL, id, TREE_TYPE (fn));
8423 DECL_EXTERNAL (decl) = 1;
8424 TREE_PUBLIC (decl) = 1;
8425 DECL_ARTIFICIAL (decl) = 1;
8426 TREE_NOTHROW (decl) = 1;
8427 DECL_VISIBILITY (decl) = VISIBILITY_DEFAULT;
8428 DECL_VISIBILITY_SPECIFIED (decl) = 1;
8429 call = rewrite_call_expr (EXPR_LOCATION (exp), exp, 0, decl, 0);
8430 return expand_call (call, target, ignore);
8435 /* Reconstitute a mode for a __sync intrinsic operation. Since the type of
8436 the pointer in these functions is void*, the tree optimizers may remove
8437 casts. The mode computed in expand_builtin isn't reliable either, due
8438 to __sync_bool_compare_and_swap.
8440 FCODE_DIFF should be fcode - base, where base is the FOO_1 code for the
8441 group of builtins. This gives us log2 of the mode size. */
8443 static inline machine_mode
8444 get_builtin_sync_mode (int fcode_diff)
8446 /* The size is not negotiable, so ask not to get BLKmode in return
8447 if the target indicates that a smaller size would be better. */
8448 return int_mode_for_size (BITS_PER_UNIT << fcode_diff, 0).require ();
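/* Illustration, not part of GCC: BUILT_IN_SYNC_FETCH_AND_ADD_4 is two
   entries past BUILT_IN_SYNC_FETCH_AND_ADD_1, so FCODE_DIFF is 2 and
   BITS_PER_UNIT << 2 == 32 bits, i.e. SImode, no matter how the void*
   argument was cast at the source level. */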
8451 /* Expand the memory expression LOC and return the appropriate memory operand
8452 for the builtin_sync operations. */
8454 static rtx
8455 get_builtin_sync_mem (tree loc, machine_mode mode)
8457 rtx addr, mem;
8458 int addr_space = TYPE_ADDR_SPACE (POINTER_TYPE_P (TREE_TYPE (loc))
8459 ? TREE_TYPE (TREE_TYPE (loc))
8460 : TREE_TYPE (loc));
8461 scalar_int_mode addr_mode = targetm.addr_space.address_mode (addr_space);
8463 addr = expand_expr (loc, NULL_RTX, addr_mode, EXPAND_SUM);
8464 addr = convert_memory_address (addr_mode, addr);
8466 /* Note that we explicitly do not want any alias information for this
8467 memory, so that we kill all other live memories. Otherwise we don't
8468 satisfy the full barrier semantics of the intrinsic. */
8469 mem = gen_rtx_MEM (mode, addr);
8471 set_mem_addr_space (mem, addr_space);
8473 mem = validize_mem (mem);
8475 /* The alignment needs to be at least that of the mode. */
8476 set_mem_align (mem, MAX (GET_MODE_ALIGNMENT (mode),
8477 get_pointer_alignment (loc)));
8478 set_mem_alias_set (mem, ALIAS_SET_MEMORY_BARRIER);
8479 MEM_VOLATILE_P (mem) = 1;
8481 return mem;
8484 /* Make sure an argument is in the right mode.
8485 EXP is the tree argument.
8486 MODE is the mode it should be in. */
8488 static rtx
8489 expand_expr_force_mode (tree exp, machine_mode mode)
8491 rtx val;
8492 machine_mode old_mode;
8494 if (TREE_CODE (exp) == SSA_NAME
8495 && TYPE_MODE (TREE_TYPE (exp)) != mode)
8497 /* Undo argument promotion if possible, as combine might not
8498 be able to do it later due to MEM_VOLATILE_P uses in the
8499 patterns. */
8500 gimple *g = get_gimple_for_ssa_name (exp);
8501 if (g && gimple_assign_cast_p (g))
8503 tree rhs = gimple_assign_rhs1 (g);
8504 tree_code code = gimple_assign_rhs_code (g);
8505 if (CONVERT_EXPR_CODE_P (code)
8506 && TYPE_MODE (TREE_TYPE (rhs)) == mode
8507 && INTEGRAL_TYPE_P (TREE_TYPE (exp))
8508 && INTEGRAL_TYPE_P (TREE_TYPE (rhs))
8509 && (TYPE_PRECISION (TREE_TYPE (exp))
8510 > TYPE_PRECISION (TREE_TYPE (rhs))))
8511 exp = rhs;
8515 val = expand_expr (exp, NULL_RTX, mode, EXPAND_NORMAL);
8516 /* If VAL is promoted to a wider mode, convert it back to MODE. Take care
8517 of CONST_INTs, where we know the old_mode only from the call argument. */
8519 old_mode = GET_MODE (val);
8520 if (old_mode == VOIDmode)
8521 old_mode = TYPE_MODE (TREE_TYPE (exp));
8522 val = convert_modes (mode, old_mode, val, 1);
8523 return val;
8527 /* Expand the __sync_xxx_and_fetch and __sync_fetch_and_xxx intrinsics.
8528 EXP is the CALL_EXPR. CODE is the rtx code
8529 that corresponds to the arithmetic or logical operation from the name;
8530 an exception here is that NOT actually means NAND. TARGET is an optional
8531 place for us to store the results; AFTER is true if this is the
8532 fetch_and_xxx form. */
8534 static rtx
8535 expand_builtin_sync_operation (machine_mode mode, tree exp,
8536 enum rtx_code code, bool after,
8537 rtx target)
8539 rtx val, mem;
8540 location_t loc = EXPR_LOCATION (exp);
8542 if (code == NOT && warn_sync_nand)
8544 tree fndecl = get_callee_fndecl (exp);
8545 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
8547 static bool warned_f_a_n, warned_n_a_f;
8549 switch (fcode)
8551 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
8552 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
8553 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
8554 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
8555 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
8556 if (warned_f_a_n)
8557 break;
8559 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_FETCH_AND_NAND_N);
8560 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8561 warned_f_a_n = true;
8562 break;
8564 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
8565 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
8566 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
8567 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
8568 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
8569 if (warned_n_a_f)
8570 break;
8572 fndecl = builtin_decl_implicit (BUILT_IN_SYNC_NAND_AND_FETCH_N);
8573 inform (loc, "%qD changed semantics in GCC 4.4", fndecl);
8574 warned_n_a_f = true;
8575 break;
8577 default:
8578 gcc_unreachable ();
8582 /* Expand the operands. */
8583 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8584 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8586 return expand_atomic_fetch_op (target, mem, val, code, MEMMODEL_SYNC_SEQ_CST,
8587 after);
8590 /* Expand the __sync_val_compare_and_swap and __sync_bool_compare_and_swap
8591 intrinsics. EXP is the CALL_EXPR. IS_BOOL is
8592 true if this is the boolean form. TARGET is a place for us to store the
8593 results; this is NOT optional if IS_BOOL is true. */
8595 static rtx
8596 expand_builtin_compare_and_swap (machine_mode mode, tree exp,
8597 bool is_bool, rtx target)
8599 rtx old_val, new_val, mem;
8600 rtx *pbool, *poval;
8602 /* Expand the operands. */
8603 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8604 old_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8605 new_val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8607 pbool = poval = NULL;
8608 if (target != const0_rtx)
8610 if (is_bool)
8611 pbool = &target;
8612 else
8613 poval = &target;
8615 if (!expand_atomic_compare_and_swap (pbool, poval, mem, old_val, new_val,
8616 false, MEMMODEL_SYNC_SEQ_CST,
8617 MEMMODEL_SYNC_SEQ_CST))
8618 return NULL_RTX;
8620 return target;
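/* Illustration, not part of GCC: both __sync forms share this one
   expansion; PBOOL and POVAL above select which result is kept:

     ok  = __sync_bool_compare_and_swap (&v, old, new_);  // PBOOL set
     was = __sync_val_compare_and_swap (&v, old, new_);   // POVAL set  */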
8623 /* Expand the __sync_lock_test_and_set intrinsic. Note that the most
8624 general form is actually an atomic exchange, and some targets only
8625 support a reduced form with the second argument being a constant 1.
8626 EXP is the CALL_EXPR; TARGET is an optional place for us to store
8627 the results. */
8629 static rtx
8630 expand_builtin_sync_lock_test_and_set (machine_mode mode, tree exp,
8631 rtx target)
8633 rtx val, mem;
8635 /* Expand the operands. */
8636 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8637 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8639 return expand_sync_lock_test_and_set (target, mem, val);
8642 /* Expand the __sync_lock_release intrinsic. EXP is the CALL_EXPR. */
8644 static void
8645 expand_builtin_sync_lock_release (machine_mode mode, tree exp)
8647 rtx mem;
8649 /* Expand the operands. */
8650 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8652 expand_atomic_store (mem, const0_rtx, MEMMODEL_SYNC_RELEASE, true);
8655 /* Given an integer representing an ``enum memmodel'', verify its
8656 correctness and return the memory model enum. */
8658 static enum memmodel
8659 get_memmodel (tree exp)
8661 rtx op;
8662 unsigned HOST_WIDE_INT val;
8663 location_t loc
8664 = expansion_point_location_if_in_system_header (input_location);
8666 /* If the parameter is not a constant, it's a run time value so we'll just
8667 convert it to MEMMODEL_SEQ_CST to avoid annoying runtime checking. */
8668 if (TREE_CODE (exp) != INTEGER_CST)
8669 return MEMMODEL_SEQ_CST;
8671 op = expand_normal (exp);
8673 val = INTVAL (op);
8674 if (targetm.memmodel_check)
8675 val = targetm.memmodel_check (val);
8676 else if (val & ~MEMMODEL_MASK)
8678 warning_at (loc, OPT_Winvalid_memory_model,
8679 "unknown architecture specifier in memory model to builtin");
8680 return MEMMODEL_SEQ_CST;
8683 /* We should never see a user-explicit SYNC memory model, so >= LAST works. */
8684 if (memmodel_base (val) >= MEMMODEL_LAST)
8686 warning_at (loc, OPT_Winvalid_memory_model,
8687 "invalid memory model argument to builtin");
8688 return MEMMODEL_SEQ_CST;
8691 /* Workaround for Bugzilla 59448. GCC doesn't track consume properly, so
8692 be conservative and promote consume to acquire. */
8693 if (val == MEMMODEL_CONSUME)
8694 val = MEMMODEL_ACQUIRE;
8696 return (enum memmodel) val;
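/* Illustration, not part of GCC: how the checks above play out.

     __atomic_load_n (p, order);              // runtime ORDER: treated
                                              // as __ATOMIC_SEQ_CST
     __atomic_load_n (p, __ATOMIC_CONSUME);   // promoted to acquire  */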
8699 /* Expand the __atomic_exchange intrinsic:
8700 TYPE __atomic_exchange (TYPE *object, TYPE desired, enum memmodel)
8701 EXP is the CALL_EXPR.
8702 TARGET is an optional place for us to store the results. */
8704 static rtx
8705 expand_builtin_atomic_exchange (machine_mode mode, tree exp, rtx target)
8707 rtx val, mem;
8708 enum memmodel model;
8710 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8712 if (!flag_inline_atomics)
8713 return NULL_RTX;
8715 /* Expand the operands. */
8716 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8717 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8719 return expand_atomic_exchange (target, mem, val, model);
8722 /* Expand the __atomic_compare_exchange intrinsic:
8723 bool __atomic_compare_exchange (TYPE *object, TYPE *expect,
8724 TYPE desired, BOOL weak,
8725 enum memmodel success,
8726 enum memmodel failure)
8727 EXP is the CALL_EXPR.
8728 TARGET is an optional place for us to store the results. */
8730 static rtx
8731 expand_builtin_atomic_compare_exchange (machine_mode mode, tree exp,
8732 rtx target)
8734 rtx expect, desired, mem, oldval;
8735 rtx_code_label *label;
8736 enum memmodel success, failure;
8737 tree weak;
8738 bool is_weak;
8739 location_t loc
8740 = expansion_point_location_if_in_system_header (input_location);
8742 success = get_memmodel (CALL_EXPR_ARG (exp, 4));
8743 failure = get_memmodel (CALL_EXPR_ARG (exp, 5));
8745 if (failure > success)
8747 warning_at (loc, OPT_Winvalid_memory_model,
8748 "failure memory model cannot be stronger than success "
8749 "memory model for %<__atomic_compare_exchange%>");
8750 success = MEMMODEL_SEQ_CST;
8753 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8755 warning_at (loc, OPT_Winvalid_memory_model,
8756 "invalid failure memory model for "
8757 "%<__atomic_compare_exchange%>");
8758 failure = MEMMODEL_SEQ_CST;
8759 success = MEMMODEL_SEQ_CST;
8763 if (!flag_inline_atomics)
8764 return NULL_RTX;
8766 /* Expand the operands. */
8767 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8769 expect = expand_normal (CALL_EXPR_ARG (exp, 1));
8770 expect = convert_memory_address (Pmode, expect);
8771 expect = gen_rtx_MEM (mode, expect);
8772 desired = expand_expr_force_mode (CALL_EXPR_ARG (exp, 2), mode);
8774 weak = CALL_EXPR_ARG (exp, 3);
8775 is_weak = false;
8776 if (tree_fits_shwi_p (weak) && tree_to_shwi (weak) != 0)
8777 is_weak = true;
8779 if (target == const0_rtx)
8780 target = NULL;
8782 /* Lest the rtl backend create a race condition with an improper store
8783 to memory, always create a new pseudo for OLDVAL. */
8784 oldval = NULL;
8786 if (!expand_atomic_compare_and_swap (&target, &oldval, mem, expect, desired,
8787 is_weak, success, failure))
8788 return NULL_RTX;
8790 /* Conditionally store back to EXPECT, lest we create a race condition
8791 with an improper store to memory. */
8792 /* ??? With a rearrangement of atomics at the gimple level, we can handle
8793 the normal case where EXPECT is totally private, i.e. a register. At
8794 which point the store can be unconditional. */
8795 label = gen_label_rtx ();
8796 emit_cmp_and_jump_insns (target, const0_rtx, NE, NULL,
8797 GET_MODE (target), 1, label);
8798 emit_move_insn (expect, oldval);
8799 emit_label (label);
8801 return target;
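/* Illustration, not part of GCC: the conditional store-back above gives
   __atomic_compare_exchange its documented failure behavior:

     int expected = 0;
     if (!__atomic_compare_exchange_n (&v, &expected, 1, false,
                                       __ATOMIC_SEQ_CST, __ATOMIC_SEQ_CST))
       use (expected);   // on failure, EXPECTED holds the value seen in V

   The emit_cmp_and_jump_insns call skips the move on success, so a
   successful exchange never writes back to EXPECTED. */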
8804 /* Helper function for expand_ifn_atomic_compare_exchange - expand
8805 internal ATOMIC_COMPARE_EXCHANGE call into __atomic_compare_exchange_N
8806 call. The weak parameter must be dropped to match the expected parameter
8807 list and the expected argument changed from value to pointer to memory
8808 slot. */
8810 static void
8811 expand_ifn_atomic_compare_exchange_into_call (gcall *call, machine_mode mode)
8813 unsigned int z;
8814 vec<tree, va_gc> *vec;
8816 vec_alloc (vec, 5);
8817 vec->quick_push (gimple_call_arg (call, 0));
8818 tree expected = gimple_call_arg (call, 1);
8819 rtx x = assign_stack_temp_for_type (mode, GET_MODE_SIZE (mode),
8820 TREE_TYPE (expected));
8821 rtx expd = expand_expr (expected, x, mode, EXPAND_NORMAL);
8822 if (expd != x)
8823 emit_move_insn (x, expd);
8824 tree v = make_tree (TREE_TYPE (expected), x);
8825 vec->quick_push (build1 (ADDR_EXPR,
8826 build_pointer_type (TREE_TYPE (expected)), v));
8827 vec->quick_push (gimple_call_arg (call, 2));
8828 /* Skip the boolean weak parameter. */
8829 for (z = 4; z < 6; z++)
8830 vec->quick_push (gimple_call_arg (call, z));
8831 /* At present we only have BUILT_IN_ATOMIC_COMPARE_EXCHANGE_{1,2,4,8,16}. */
8832 unsigned int bytes_log2 = exact_log2 (GET_MODE_SIZE (mode).to_constant ());
8833 gcc_assert (bytes_log2 < 5);
8834 built_in_function fncode
8835 = (built_in_function) ((int) BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1
8836 + bytes_log2);
8837 tree fndecl = builtin_decl_explicit (fncode);
8838 tree fn = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (fndecl)),
8839 fndecl);
8840 tree exp = build_call_vec (boolean_type_node, fn, vec);
8841 tree lhs = gimple_call_lhs (call);
8842 rtx boolret = expand_call (exp, NULL_RTX, lhs == NULL_TREE);
8843 if (lhs)
8845 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8846 if (GET_MODE (boolret) != mode)
8847 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
8848 x = force_reg (mode, x);
8849 write_complex_part (target, boolret, true);
8850 write_complex_part (target, x, false);
8854 /* Expand IFN_ATOMIC_COMPARE_EXCHANGE internal function. */
8856 void
8857 expand_ifn_atomic_compare_exchange (gcall *call)
8859 int size = tree_to_shwi (gimple_call_arg (call, 3)) & 255;
8860 gcc_assert (size == 1 || size == 2 || size == 4 || size == 8 || size == 16);
8861 machine_mode mode = int_mode_for_size (BITS_PER_UNIT * size, 0).require ();
8862 rtx expect, desired, mem, oldval, boolret;
8863 enum memmodel success, failure;
8864 tree lhs;
8865 bool is_weak;
8866 location_t loc
8867 = expansion_point_location_if_in_system_header (gimple_location (call));
8869 success = get_memmodel (gimple_call_arg (call, 4));
8870 failure = get_memmodel (gimple_call_arg (call, 5));
8872 if (failure > success)
8874 warning_at (loc, OPT_Winvalid_memory_model,
8875 "failure memory model cannot be stronger than success "
8876 "memory model for %<__atomic_compare_exchange%>");
8877 success = MEMMODEL_SEQ_CST;
8880 if (is_mm_release (failure) || is_mm_acq_rel (failure))
8882 warning_at (loc, OPT_Winvalid_memory_model,
8883 "invalid failure memory model for "
8884 "%<__atomic_compare_exchange%>");
8885 failure = MEMMODEL_SEQ_CST;
8886 success = MEMMODEL_SEQ_CST;
8889 if (!flag_inline_atomics)
8891 expand_ifn_atomic_compare_exchange_into_call (call, mode);
8892 return;
8895 /* Expand the operands. */
8896 mem = get_builtin_sync_mem (gimple_call_arg (call, 0), mode);
8898 expect = expand_expr_force_mode (gimple_call_arg (call, 1), mode);
8899 desired = expand_expr_force_mode (gimple_call_arg (call, 2), mode);
8901 is_weak = (tree_to_shwi (gimple_call_arg (call, 3)) & 256) != 0;
8903 boolret = NULL;
8904 oldval = NULL;
8906 if (!expand_atomic_compare_and_swap (&boolret, &oldval, mem, expect, desired,
8907 is_weak, success, failure))
8909 expand_ifn_atomic_compare_exchange_into_call (call, mode);
8910 return;
8913 lhs = gimple_call_lhs (call);
8914 if (lhs)
8916 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
8917 if (GET_MODE (boolret) != mode)
8918 boolret = convert_modes (mode, GET_MODE (boolret), boolret, 1);
8919 write_complex_part (target, boolret, true);
8920 write_complex_part (target, oldval, false);
8924 /* Expand the __atomic_load intrinsic:
8925 TYPE __atomic_load (TYPE *object, enum memmodel)
8926 EXP is the CALL_EXPR.
8927 TARGET is an optional place for us to store the results. */
8929 static rtx
8930 expand_builtin_atomic_load (machine_mode mode, tree exp, rtx target)
8932 rtx mem;
8933 enum memmodel model;
8935 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
8936 if (is_mm_release (model) || is_mm_acq_rel (model))
8938 location_t loc
8939 = expansion_point_location_if_in_system_header (input_location);
8940 warning_at (loc, OPT_Winvalid_memory_model,
8941 "invalid memory model for %<__atomic_load%>");
8942 model = MEMMODEL_SEQ_CST;
8945 if (!flag_inline_atomics)
8946 return NULL_RTX;
8948 /* Expand the operand. */
8949 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8951 return expand_atomic_load (target, mem, model);
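/* Illustrative sketch (editor's addition, not GCC source): a source-level
   use that reaches this expander, assuming a lock-free int:

     int v = __atomic_load_n (&shared, __ATOMIC_ACQUIRE);

   RELEASE and ACQ_REL are diagnosed above because a load has no store
   side to which release semantics could apply.  */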
8955 /* Expand the __atomic_store intrinsic:
8956 void __atomic_store (TYPE *object, TYPE desired, enum memmodel)
8957 EXP is the CALL_EXPR.
8958 TARGET is an optional place for us to store the results. */
8960 static rtx
8961 expand_builtin_atomic_store (machine_mode mode, tree exp)
8963 rtx mem, val;
8964 enum memmodel model;
8966 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
8967 if (!(is_mm_relaxed (model) || is_mm_seq_cst (model)
8968 || is_mm_release (model)))
8970 location_t loc
8971 = expansion_point_location_if_in_system_header (input_location);
8972 warning_at (loc, OPT_Winvalid_memory_model,
8973 "invalid memory model for %<__atomic_store%>");
8974 model = MEMMODEL_SEQ_CST;
8977 if (!flag_inline_atomics)
8978 return NULL_RTX;
8980 /* Expand the operands. */
8981 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
8982 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
8984 return expand_atomic_store (mem, val, model, false);
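/* Illustrative sketch (editor's addition, not GCC source):

     __atomic_store_n (&shared, 42, __ATOMIC_RELEASE);

   Only RELAXED, RELEASE and SEQ_CST are accepted above; ACQUIRE-class
   models make no sense for a pure store and are diagnosed.  */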
8987 /* Expand the __atomic_fetch_XXX intrinsic:
8988 TYPE __atomic_fetch_XXX (TYPE *object, TYPE val, enum memmodel)
8989 EXP is the CALL_EXPR.
8990 TARGET is an optional place for us to store the results.
8991 CODE is the operation: PLUS, MINUS, AND, XOR, IOR, or NOT (for nand).
8992 FETCH_AFTER is true if returning the result of the operation.
8993 FETCH_AFTER is false if returning the value before the operation.
8994 IGNORE is true if the result is not used.
8995 EXT_CALL is the correct builtin for an external call if this cannot be
8996 resolved to an instruction sequence. */
8998 static rtx
8999 expand_builtin_atomic_fetch_op (machine_mode mode, tree exp, rtx target,
9000 enum rtx_code code, bool fetch_after,
9001 bool ignore, enum built_in_function ext_call)
9003 rtx val, mem, ret;
9004 enum memmodel model;
9005 tree fndecl;
9006 tree addr;
9008 model = get_memmodel (CALL_EXPR_ARG (exp, 2));
9010 /* Expand the operands. */
9011 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
9012 val = expand_expr_force_mode (CALL_EXPR_ARG (exp, 1), mode);
9014 /* Only try generating instructions if inlining is turned on. */
9015 if (flag_inline_atomics)
9017 ret = expand_atomic_fetch_op (target, mem, val, code, model, fetch_after);
9018 if (ret)
9019 return ret;
9022 /* Return if a different routine isn't needed for the library call. */
9023 if (ext_call == BUILT_IN_NONE)
9024 return NULL_RTX;
9026 /* Change the call to the specified function. */
9027 fndecl = get_callee_fndecl (exp);
9028 addr = CALL_EXPR_FN (exp);
9029 STRIP_NOPS (addr);
9031 gcc_assert (TREE_OPERAND (addr, 0) == fndecl);
9032 TREE_OPERAND (addr, 0) = builtin_decl_explicit (ext_call);
9034 /* If we will emit code after the call, the call cannot be a tail call.
9035 If it is emitted as a tail call, a barrier is emitted after it, and
9036 then all trailing code is removed. */
9037 if (!ignore)
9038 CALL_EXPR_TAILCALL (exp) = 0;
9040 /* Expand the call here so we can emit trailing code. */
9041 ret = expand_call (exp, target, ignore);
9043 /* Replace the original function just in case it matters. */
9044 TREE_OPERAND (addr, 0) = fndecl;
9046 /* Then issue the arithmetic correction to return the right result. */
9047 if (!ignore)
9049 if (code == NOT)
9051 ret = expand_simple_binop (mode, AND, ret, val, NULL_RTX, true,
9052 OPTAB_LIB_WIDEN);
9053 ret = expand_simple_unop (mode, NOT, ret, target, true);
9055 else
9056 ret = expand_simple_binop (mode, code, ret, val, target, true,
9057 OPTAB_LIB_WIDEN);
9059 return ret;
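/* Illustrative sketch (editor's addition, not GCC source): the arithmetic
   correction above derives an OP-and-fetch result from a fetch-and-OP
   library call.  If only __atomic_fetch_add is available, for example:

     int old = __atomic_fetch_add (p, n, __ATOMIC_SEQ_CST);
     int result = old + n;    // what __atomic_add_fetch must return

   and for nand (CODE == NOT) the fixup is result = ~(old & n), matching
   the AND-then-NOT sequence emitted above.  */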
9062 /* Expand IFN_ATOMIC_BIT_TEST_AND_* internal function. */
9064 void
9065 expand_ifn_atomic_bit_test_and (gcall *call)
9067 tree ptr = gimple_call_arg (call, 0);
9068 tree bit = gimple_call_arg (call, 1);
9069 tree flag = gimple_call_arg (call, 2);
9070 tree lhs = gimple_call_lhs (call);
9071 enum memmodel model = MEMMODEL_SYNC_SEQ_CST;
9072 machine_mode mode = TYPE_MODE (TREE_TYPE (flag));
9073 enum rtx_code code;
9074 optab optab;
9075 class expand_operand ops[5];
9077 gcc_assert (flag_inline_atomics);
9079 if (gimple_call_num_args (call) == 4)
9080 model = get_memmodel (gimple_call_arg (call, 3));
9082 rtx mem = get_builtin_sync_mem (ptr, mode);
9083 rtx val = expand_expr_force_mode (bit, mode);
9085 switch (gimple_call_internal_fn (call))
9087 case IFN_ATOMIC_BIT_TEST_AND_SET:
9088 code = IOR;
9089 optab = atomic_bit_test_and_set_optab;
9090 break;
9091 case IFN_ATOMIC_BIT_TEST_AND_COMPLEMENT:
9092 code = XOR;
9093 optab = atomic_bit_test_and_complement_optab;
9094 break;
9095 case IFN_ATOMIC_BIT_TEST_AND_RESET:
9096 code = AND;
9097 optab = atomic_bit_test_and_reset_optab;
9098 break;
9099 default:
9100 gcc_unreachable ();
9103 if (lhs == NULL_TREE)
9105 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
9106 val, NULL_RTX, true, OPTAB_DIRECT);
9107 if (code == AND)
9108 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
9109 expand_atomic_fetch_op (const0_rtx, mem, val, code, model, false);
9110 return;
9113 rtx target = expand_expr (lhs, NULL_RTX, VOIDmode, EXPAND_WRITE);
9114 enum insn_code icode = direct_optab_handler (optab, mode);
9115 gcc_assert (icode != CODE_FOR_nothing);
9116 create_output_operand (&ops[0], target, mode);
9117 create_fixed_operand (&ops[1], mem);
9118 create_convert_operand_to (&ops[2], val, mode, true);
9119 create_integer_operand (&ops[3], model);
9120 create_integer_operand (&ops[4], integer_onep (flag));
9121 if (maybe_expand_insn (icode, 5, ops))
9122 return;
9124 rtx bitval = val;
9125 val = expand_simple_binop (mode, ASHIFT, const1_rtx,
9126 val, NULL_RTX, true, OPTAB_DIRECT);
9127 rtx maskval = val;
9128 if (code == AND)
9129 val = expand_simple_unop (mode, NOT, val, NULL_RTX, true);
9130 rtx result = expand_atomic_fetch_op (gen_reg_rtx (mode), mem, val,
9131 code, model, false);
9132 if (integer_onep (flag))
9134 result = expand_simple_binop (mode, ASHIFTRT, result, bitval,
9135 NULL_RTX, true, OPTAB_DIRECT);
9136 result = expand_simple_binop (mode, AND, result, const1_rtx, target,
9137 true, OPTAB_DIRECT);
9139 else
9140 result = expand_simple_binop (mode, AND, result, maskval, target, true,
9141 OPTAB_DIRECT);
9142 if (result != target)
9143 emit_move_insn (target, result);
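/* Illustrative sketch (editor's addition, not GCC source): the GIMPLE
   pattern that produces these internal calls.  A fragment such as

     bool was_set = __atomic_fetch_or (&word, 1u << bit, __ATOMIC_SEQ_CST)
                    & (1u << bit);

   is recognized earlier and rewritten to IFN_ATOMIC_BIT_TEST_AND_SET,
   so a target can emit a single bit-test-and-set instruction instead of
   a full fetch_or plus mask.  */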
9146 /* Expand an atomic clear operation.
9147 void __atomic_clear (BOOL *obj, enum memmodel)
9148 EXP is the call expression. */
9150 static rtx
9151 expand_builtin_atomic_clear (tree exp)
9153 machine_mode mode;
9154 rtx mem, ret;
9155 enum memmodel model;
9157 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
9158 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
9159 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
9161 if (is_mm_consume (model) || is_mm_acquire (model) || is_mm_acq_rel (model))
9163 location_t loc
9164 = expansion_point_location_if_in_system_header (input_location);
9165 warning_at (loc, OPT_Winvalid_memory_model,
9166 "invalid memory model for %<__atomic_store%>");
9167 model = MEMMODEL_SEQ_CST;
9170 /* Try issuing an __atomic_store, allowing a fallback to
9171 __sync_lock_release.  The only way this can fail is if the bool type
9172 is larger than a word size.  Unlikely, but handle it anyway for
9173 completeness.  In that case assume a single threaded model, since
9174 there is no atomic support and no barriers are required.  */
9175 ret = expand_atomic_store (mem, const0_rtx, model, true);
9176 if (!ret)
9177 emit_move_insn (mem, const0_rtx);
9178 return const0_rtx;
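/* Illustrative sketch (editor's addition, not GCC source):

     __atomic_clear (&flag, __ATOMIC_RELEASE);

   stores false into a flag, typically one set by __atomic_test_and_set;
   CONSUME, ACQUIRE and ACQ_REL models are diagnosed above.  */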
9181 /* Expand an atomic test_and_set operation.
9182 bool __atomic_test_and_set (BOOL *obj, enum memmodel)
9183 EXP is the call expression. */
9185 static rtx
9186 expand_builtin_atomic_test_and_set (tree exp, rtx target)
9188 rtx mem;
9189 enum memmodel model;
9190 machine_mode mode;
9192 mode = int_mode_for_size (BOOL_TYPE_SIZE, 0).require ();
9193 mem = get_builtin_sync_mem (CALL_EXPR_ARG (exp, 0), mode);
9194 model = get_memmodel (CALL_EXPR_ARG (exp, 1));
9196 return expand_atomic_test_and_set (target, mem, model);
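/* Illustrative sketch (editor's addition, not GCC source): together these
   two builtins are enough for a minimal spinlock:

     static volatile _Bool lock;

     void take (void)
     {
       while (__atomic_test_and_set (&lock, __ATOMIC_ACQUIRE))
         ;    // spin until the previous value was false
     }

     void drop (void)
     {
       __atomic_clear (&lock, __ATOMIC_RELEASE);
     }
   */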
9200 /* Return a boolean node indicating whether the (optional) argument ARG1 of
9201 size ARG0 is always lock free on this architecture.  If ARG1 is NULL, use typical alignment for size ARG0. */
9203 static tree
9204 fold_builtin_atomic_always_lock_free (tree arg0, tree arg1)
9206 int size;
9207 machine_mode mode;
9208 unsigned int mode_align, type_align;
9210 if (TREE_CODE (arg0) != INTEGER_CST)
9211 return NULL_TREE;
9213 /* We need a corresponding integer mode for the access to be lock-free. */
9214 size = INTVAL (expand_normal (arg0)) * BITS_PER_UNIT;
9215 if (!int_mode_for_size (size, 0).exists (&mode))
9216 return boolean_false_node;
9218 mode_align = GET_MODE_ALIGNMENT (mode);
9220 if (TREE_CODE (arg1) == INTEGER_CST)
9222 unsigned HOST_WIDE_INT val = UINTVAL (expand_normal (arg1));
9224 /* Either this argument is null, or it's a fake pointer encoding
9225 the alignment of the object. */
9226 val = least_bit_hwi (val);
9227 val *= BITS_PER_UNIT;
9229 if (val == 0 || mode_align < val)
9230 type_align = mode_align;
9231 else
9232 type_align = val;
9234 else
9236 tree ttype = TREE_TYPE (arg1);
9238 /* This function is usually invoked and folded immediately by the front
9239 end before anything else has a chance to look at it. The pointer
9240 parameter at this point is usually cast to a void *, so check for that
9241 and look past the cast. */
9242 if (CONVERT_EXPR_P (arg1)
9243 && POINTER_TYPE_P (ttype)
9244 && VOID_TYPE_P (TREE_TYPE (ttype))
9245 && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (arg1, 0))))
9246 arg1 = TREE_OPERAND (arg1, 0);
9248 ttype = TREE_TYPE (arg1);
9249 gcc_assert (POINTER_TYPE_P (ttype));
9251 /* Get the underlying type of the object. */
9252 ttype = TREE_TYPE (ttype);
9253 type_align = TYPE_ALIGN (ttype);
9256 /* If the object has smaller alignment, the lock free routines cannot
9257 be used. */
9258 if (type_align < mode_align)
9259 return boolean_false_node;
9261 /* Check if a compare_and_swap pattern exists for the mode which represents
9262 the required size. The pattern is not allowed to fail, so the existence
9263 of the pattern indicates support is present. Also require that an
9264 atomic load exists for the required size. */
9265 if (can_compare_and_swap_p (mode, true) && can_atomic_load_p (mode))
9266 return boolean_true_node;
9267 else
9268 return boolean_false_node;
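/* Illustrative sketch (editor's addition, not GCC source): because this
   folding needs only a size and an alignment, a call such as

     _Static_assert (__atomic_always_lock_free (sizeof (int), 0),
                     "int must be lock free");

   resolves to a constant at compile time; the null pointer stands for
   the typical alignment of an object of that size.  */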
9271 /* Return true if the parameters to call EXP represent an object which will
9272 always generate lock free instructions, and false otherwise.  The first
9273 argument represents the size of the object, and the second parameter is a
9274 pointer to the object itself.  If NULL is passed for the object, then the
9275 result is based on typical alignment for an object of the specified
9276 size. */
9278 static rtx
9279 expand_builtin_atomic_always_lock_free (tree exp)
9281 tree size;
9282 tree arg0 = CALL_EXPR_ARG (exp, 0);
9283 tree arg1 = CALL_EXPR_ARG (exp, 1);
9285 if (TREE_CODE (arg0) != INTEGER_CST)
9287 error ("non-constant argument 1 to %qs", "__atomic_always_lock_free");
9288 return const0_rtx;
9291 size = fold_builtin_atomic_always_lock_free (arg0, arg1);
9292 if (size == boolean_true_node)
9293 return const1_rtx;
9294 return const0_rtx;
9297 /* Return boolean_true_node if it can be determined that the object ARG1 of
9298 size ARG0 is lock free on this architecture, otherwise NULL_TREE. */
9300 static tree
9301 fold_builtin_atomic_is_lock_free (tree arg0, tree arg1)
9303 if (!flag_inline_atomics)
9304 return NULL_TREE;
9306 /* If it isn't always lock free, don't generate a result. */
9307 if (fold_builtin_atomic_always_lock_free (arg0, arg1) == boolean_true_node)
9308 return boolean_true_node;
9310 return NULL_TREE;
9313 /* Return one if it can be determined that the object described by the
9314 parameters to call EXP is lock free on this architecture, and NULL
9315 otherwise.  The first argument represents the size of the object, and
9316 the second parameter is a pointer to the object itself.  If NULL is
9317 passed for the object, then the result is based on typical alignment
9318 for an object of the specified size. */
9320 static rtx
9321 expand_builtin_atomic_is_lock_free (tree exp)
9323 tree size;
9324 tree arg0 = CALL_EXPR_ARG (exp, 0);
9325 tree arg1 = CALL_EXPR_ARG (exp, 1);
9327 if (!INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
9329 error ("non-integer argument 1 to %qs", "__atomic_is_lock_free");
9330 return NULL_RTX;
9333 if (!flag_inline_atomics)
9334 return NULL_RTX;
9336 /* If the value is known at compile time, return the RTX for it. */
9337 size = fold_builtin_atomic_is_lock_free (arg0, arg1);
9338 if (size == boolean_true_node)
9339 return const1_rtx;
9341 return NULL_RTX;
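/* Illustrative sketch (editor's addition, not GCC source): unlike the
   always_lock_free form, this query may have to be answered at run time:

     if (__atomic_is_lock_free (sizeof (T), &obj))
       ...    // lock-free fast path

   When NULL_RTX is returned here, the call falls through to libatomic's
   __atomic_is_lock_free routine.  T and obj are placeholders.  */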
9344 /* Expand the __atomic_thread_fence intrinsic:
9345 void __atomic_thread_fence (enum memmodel)
9346 EXP is the CALL_EXPR. */
9348 static void
9349 expand_builtin_atomic_thread_fence (tree exp)
9351 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9352 expand_mem_thread_fence (model);
9355 /* Expand the __atomic_signal_fence intrinsic:
9356 void __atomic_signal_fence (enum memmodel)
9357 EXP is the CALL_EXPR. */
9359 static void
9360 expand_builtin_atomic_signal_fence (tree exp)
9362 enum memmodel model = get_memmodel (CALL_EXPR_ARG (exp, 0));
9363 expand_mem_signal_fence (model);
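/* Illustrative sketch (editor's addition, not GCC source):

     __atomic_thread_fence (__ATOMIC_SEQ_CST);    // inter-thread fence
     __atomic_signal_fence (__ATOMIC_SEQ_CST);    // compiler-only barrier

   A signal fence only orders accesses against a signal handler running
   in the same thread, so it need not emit a machine barrier.  */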
9366 /* Expand the __sync_synchronize intrinsic. */
9368 static void
9369 expand_builtin_sync_synchronize (void)
9371 expand_mem_thread_fence (MEMMODEL_SYNC_SEQ_CST);
9374 static rtx
9375 expand_builtin_thread_pointer (tree exp, rtx target)
9377 enum insn_code icode;
9378 if (!validate_arglist (exp, VOID_TYPE))
9379 return const0_rtx;
9380 icode = direct_optab_handler (get_thread_pointer_optab, Pmode);
9381 if (icode != CODE_FOR_nothing)
9383 class expand_operand op;
9384 /* If the target is not suitable then create a new target. */
9385 if (target == NULL_RTX
9386 || !REG_P (target)
9387 || GET_MODE (target) != Pmode)
9388 target = gen_reg_rtx (Pmode);
9389 create_output_operand (&op, target, Pmode);
9390 expand_insn (icode, 1, &op);
9391 return target;
9393 error ("%<__builtin_thread_pointer%> is not supported on this target");
9394 return const0_rtx;
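/* Illustrative sketch (editor's addition, not GCC source):

     void *tp = __builtin_thread_pointer ();

   On targets with a thread-pointer register this expands to a single
   register read; otherwise the error above is emitted.  */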
9397 static void
9398 expand_builtin_set_thread_pointer (tree exp)
9400 enum insn_code icode;
9401 if (!validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9402 return;
9403 icode = direct_optab_handler (set_thread_pointer_optab, Pmode);
9404 if (icode != CODE_FOR_nothing)
9406 class expand_operand op;
9407 rtx val = expand_expr (CALL_EXPR_ARG (exp, 0), NULL_RTX,
9408 Pmode, EXPAND_NORMAL);
9409 create_input_operand (&op, val, Pmode);
9410 expand_insn (icode, 1, &op);
9411 return;
9413 error ("%<__builtin_set_thread_pointer%> is not supported on this target");
9417 /* Emit code to restore the current value of the stack pointer. */
9419 static void
9420 expand_stack_restore (tree var)
9422 rtx_insn *prev;
9423 rtx sa = expand_normal (var);
9425 sa = convert_memory_address (Pmode, sa);
9427 prev = get_last_insn ();
9428 emit_stack_restore (SAVE_BLOCK, sa);
9430 record_new_stack_level ();
9432 fixup_args_size_notes (prev, get_last_insn (), 0);
9435 /* Emit code to save the current value of the stack pointer. */
9437 static rtx
9438 expand_stack_save (void)
9440 rtx ret = NULL_RTX;
9442 emit_stack_save (SAVE_BLOCK, &ret);
9443 return ret;
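/* Illustrative sketch (editor's addition, not GCC source): these two stub
   builtins are compiler-internal (not user-callable) and bracket scopes
   containing variable-length data, conceptually:

     void *sp = __builtin_stack_save ();
     char vla[n];    // alloca-like stack allocation
     ...
     __builtin_stack_restore (sp);

   so the variable-sized stack space is reclaimed when the scope exits.  */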
9446 /* Emit code to get the OpenACC gang, worker or vector id or size. */
9448 static rtx
9449 expand_builtin_goacc_parlevel_id_size (tree exp, rtx target, int ignore)
9451 const char *name;
9452 rtx fallback_retval;
9453 rtx_insn *(*gen_fn) (rtx, rtx);
9454 switch (DECL_FUNCTION_CODE (get_callee_fndecl (exp)))
9456 case BUILT_IN_GOACC_PARLEVEL_ID:
9457 name = "__builtin_goacc_parlevel_id";
9458 fallback_retval = const0_rtx;
9459 gen_fn = targetm.gen_oacc_dim_pos;
9460 break;
9461 case BUILT_IN_GOACC_PARLEVEL_SIZE:
9462 name = "__builtin_goacc_parlevel_size";
9463 fallback_retval = const1_rtx;
9464 gen_fn = targetm.gen_oacc_dim_size;
9465 break;
9466 default:
9467 gcc_unreachable ();
9470 if (oacc_get_fn_attrib (current_function_decl) == NULL_TREE)
9472 error ("%qs only supported in OpenACC code", name);
9473 return const0_rtx;
9476 tree arg = CALL_EXPR_ARG (exp, 0);
9477 if (TREE_CODE (arg) != INTEGER_CST)
9479 error ("non-constant argument 0 to %qs", name);
9480 return const0_rtx;
9483 int dim = TREE_INT_CST_LOW (arg);
9484 switch (dim)
9486 case GOMP_DIM_GANG:
9487 case GOMP_DIM_WORKER:
9488 case GOMP_DIM_VECTOR:
9489 break;
9490 default:
9491 error ("illegal argument 0 to %qs", name);
9492 return const0_rtx;
9495 if (ignore)
9496 return target;
9498 if (target == NULL_RTX)
9499 target = gen_reg_rtx (TYPE_MODE (TREE_TYPE (exp)));
9501 if (!targetm.have_oacc_dim_size ())
9503 emit_move_insn (target, fallback_retval);
9504 return target;
9507 rtx reg = MEM_P (target) ? gen_reg_rtx (GET_MODE (target)) : target;
9508 emit_insn (gen_fn (reg, GEN_INT (dim)));
9509 if (reg != target)
9510 emit_move_insn (target, reg);
9512 return target;
9515 /* Expand a string compare operation using a sequence of char comparisons
9516 to get rid of the calling overhead, with the result going to TARGET if
9517 that's convenient.
9519 VAR_STR is the variable string source;
9520 CONST_STR is the constant string source;
9521 LENGTH is the number of chars to compare;
9522 CONST_STR_N indicates which source string is the constant string;
9523 IS_MEMCMP indicates whether it's a memcmp or strcmp.
9525 The call is expanded to the following (assuming const_str_n is 2, i.e., arg2 is a constant string):
9527 target = (int) (unsigned char) var_str[0]
9528 - (int) (unsigned char) const_str[0];
9529 if (target != 0)
9530 goto ne_label;
9532 target = (int) (unsigned char) var_str[length - 2]
9533 - (int) (unsigned char) const_str[length - 2];
9534 if (target != 0)
9535 goto ne_label;
9536 target = (int) (unsigned char) var_str[length - 1]
9537 - (int) (unsigned char) const_str[length - 1];
9538 ne_label:
9541 static rtx
9542 inline_string_cmp (rtx target, tree var_str, const char *const_str,
9543 unsigned HOST_WIDE_INT length,
9544 int const_str_n, machine_mode mode)
9546 HOST_WIDE_INT offset = 0;
9547 rtx var_rtx_array
9548 = get_memory_rtx (var_str, build_int_cst (unsigned_type_node, length));
9549 rtx var_rtx = NULL_RTX;
9550 rtx const_rtx = NULL_RTX;
9551 rtx result = target ? target : gen_reg_rtx (mode);
9552 rtx_code_label *ne_label = gen_label_rtx ();
9553 tree unit_type_node = unsigned_char_type_node;
9554 scalar_int_mode unit_mode
9555 = as_a <scalar_int_mode> TYPE_MODE (unit_type_node);
9557 start_sequence ();
9559 for (unsigned HOST_WIDE_INT i = 0; i < length; i++)
9561 var_rtx
9562 = adjust_address (var_rtx_array, TYPE_MODE (unit_type_node), offset);
9563 const_rtx = c_readstr (const_str + offset, unit_mode);
9564 rtx op0 = (const_str_n == 1) ? const_rtx : var_rtx;
9565 rtx op1 = (const_str_n == 1) ? var_rtx : const_rtx;
9567 op0 = convert_modes (mode, unit_mode, op0, 1);
9568 op1 = convert_modes (mode, unit_mode, op1, 1);
9569 result = expand_simple_binop (mode, MINUS, op0, op1,
9570 result, 1, OPTAB_WIDEN);
9571 if (i < length - 1)
9572 emit_cmp_and_jump_insns (result, CONST0_RTX (mode), NE, NULL_RTX,
9573 mode, true, ne_label);
9574 offset += GET_MODE_SIZE (unit_mode);
9577 emit_label (ne_label);
9578 rtx_insn *insns = get_insns ();
9579 end_sequence ();
9580 emit_insn (insns);
9582 return result;
9585 /* Inline expansion of a call to str(n)cmp and memcmp, with result going
9586 to TARGET if that's convenient.
9587 If the call is not inlined, return NULL_RTX. */
9589 static rtx
9590 inline_expand_builtin_bytecmp (tree exp, rtx target)
9592 tree fndecl = get_callee_fndecl (exp);
9593 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9594 bool is_ncmp = (fcode == BUILT_IN_STRNCMP || fcode == BUILT_IN_MEMCMP);
9596 /* Do NOT apply this inlining expansion when optimizing for size or
9597 the optimization level is below 2. */
9598 if (optimize < 2 || optimize_insn_for_size_p ())
9599 return NULL_RTX;
9601 gcc_checking_assert (fcode == BUILT_IN_STRCMP
9602 || fcode == BUILT_IN_STRNCMP
9603 || fcode == BUILT_IN_MEMCMP);
9605 /* On a target where the type of the call (int) has the same or narrower
9606 precision than unsigned char, give up the inlining expansion. */
9607 if (TYPE_PRECISION (unsigned_char_type_node)
9608 >= TYPE_PRECISION (TREE_TYPE (exp)))
9609 return NULL_RTX;
9611 tree arg1 = CALL_EXPR_ARG (exp, 0);
9612 tree arg2 = CALL_EXPR_ARG (exp, 1);
9613 tree len3_tree = is_ncmp ? CALL_EXPR_ARG (exp, 2) : NULL_TREE;
9615 unsigned HOST_WIDE_INT len1 = 0;
9616 unsigned HOST_WIDE_INT len2 = 0;
9617 unsigned HOST_WIDE_INT len3 = 0;
9619 /* Get the object representation of the initializers of ARG1 and ARG2
9620 as strings, provided they refer to constant objects, with their byte
9621 sizes in LEN1 and LEN2, respectively. */
9622 const char *bytes1 = getbyterep (arg1, &len1);
9623 const char *bytes2 = getbyterep (arg2, &len2);
9625 /* Fail if neither argument refers to an initialized constant. */
9626 if (!bytes1 && !bytes2)
9627 return NULL_RTX;
9629 if (is_ncmp)
9631 /* Fail if the memcmp/strncmp bound is not a constant. */
9632 if (!tree_fits_uhwi_p (len3_tree))
9633 return NULL_RTX;
9635 len3 = tree_to_uhwi (len3_tree);
9637 if (fcode == BUILT_IN_MEMCMP)
9639 /* Fail if the memcmp bound is greater than the size of either
9640 of the two constant objects. */
9641 if ((bytes1 && len1 < len3)
9642 || (bytes2 && len2 < len3))
9643 return NULL_RTX;
9647 if (fcode != BUILT_IN_MEMCMP)
9649 /* For string functions (i.e., strcmp and strncmp) reduce LEN1
9650 and LEN2 to the length of the nul-terminated string stored
9651 in each. */
9652 if (bytes1 != NULL)
9653 len1 = strnlen (bytes1, len1) + 1;
9654 if (bytes2 != NULL)
9655 len2 = strnlen (bytes2, len2) + 1;
9658 /* See inline_string_cmp. */
9659 int const_str_n;
9660 if (!len1)
9661 const_str_n = 2;
9662 else if (!len2)
9663 const_str_n = 1;
9664 else if (len2 > len1)
9665 const_str_n = 1;
9666 else
9667 const_str_n = 2;
9669 /* For strncmp only, compute the new bound as the smallest of
9670 the lengths of the two strings (plus 1) and the bound provided
9671 to the function. */
9672 unsigned HOST_WIDE_INT bound = (const_str_n == 1) ? len1 : len2;
9673 if (is_ncmp && len3 < bound)
9674 bound = len3;
9676 /* If the bound of the comparison is larger than the threshold,
9677 do nothing. */
9678 if (bound > (unsigned HOST_WIDE_INT) param_builtin_string_cmp_inline_length)
9679 return NULL_RTX;
9681 machine_mode mode = TYPE_MODE (TREE_TYPE (exp));
9683 /* Now start the inline expansion of the call. */
9684 return inline_string_cmp (target, (const_str_n == 1) ? arg2 : arg1,
9685 (const_str_n == 1) ? bytes1 : bytes2, bound,
9686 const_str_n, mode);
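/* Illustrative sketch (editor's addition, not GCC source): a call such as

     if (strcmp (name, "ab") == 0)
       ...

   qualifies because "ab" is a constant string, giving a bound of 3 bytes
   (two characters plus the nul); at -O2, and with the bound within
   --param builtin-string-cmp-inline-length, the call is replaced by the
   byte-comparison sequence built in inline_string_cmp above.  */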
9689 /* Expand a call to __builtin_speculation_safe_value_<N>. MODE
9690 represents the size of the first argument to that call, or VOIDmode
9691 if the argument is a pointer. IGNORE will be true if the result
9692 isn't used. */
9693 static rtx
9694 expand_speculation_safe_value (machine_mode mode, tree exp, rtx target,
9695 bool ignore)
9697 rtx val, failsafe;
9698 unsigned nargs = call_expr_nargs (exp);
9700 tree arg0 = CALL_EXPR_ARG (exp, 0);
9702 if (mode == VOIDmode)
9704 mode = TYPE_MODE (TREE_TYPE (arg0));
9705 gcc_assert (GET_MODE_CLASS (mode) == MODE_INT);
9708 val = expand_expr (arg0, NULL_RTX, mode, EXPAND_NORMAL);
9710 /* An optional second argument can be used as a failsafe value on
9711 some machines. If it isn't present, then the failsafe value is
9712 assumed to be 0. */
9713 if (nargs > 1)
9715 tree arg1 = CALL_EXPR_ARG (exp, 1);
9716 failsafe = expand_expr (arg1, NULL_RTX, mode, EXPAND_NORMAL);
9718 else
9719 failsafe = const0_rtx;
9721 /* If the result isn't used, the behavior is undefined. It would be
9722 nice to emit a warning here, but path splitting means this might
9723 happen with legitimate code. So simply drop the builtin
9724 expansion in that case; we've handled any side-effects above. */
9725 if (ignore)
9726 return const0_rtx;
9728 /* If we don't have a suitable target, create one to hold the result. */
9729 if (target == NULL || GET_MODE (target) != mode)
9730 target = gen_reg_rtx (mode);
9732 if (GET_MODE (val) != mode && GET_MODE (val) != VOIDmode)
9733 val = convert_modes (mode, VOIDmode, val, false);
9735 return targetm.speculation_safe_value (mode, target, val, failsafe);
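/* Illustrative sketch (editor's addition, not GCC source): a typical
   Spectre-v1 style mitigation,

     if (i < bound)
       val = array[__builtin_speculation_safe_value (i)];

   which yields I on the architecturally executed path but forces the
   failsafe value (0 by default) if the bounds check is still unresolved
   under speculation.  */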
9738 /* Expand an expression EXP that calls a built-in function,
9739 with result going to TARGET if that's convenient
9740 (and in mode MODE if that's convenient).
9741 SUBTARGET may be used as the target for computing one of EXP's operands.
9742 IGNORE is nonzero if the value is to be ignored. */
9744 rtx
9745 expand_builtin (tree exp, rtx target, rtx subtarget, machine_mode mode,
9746 int ignore)
9748 tree fndecl = get_callee_fndecl (exp);
9749 machine_mode target_mode = TYPE_MODE (TREE_TYPE (exp));
9750 int flags;
9752 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
9753 return targetm.expand_builtin (exp, target, subtarget, mode, ignore);
9755 /* When ASan is enabled, we don't want to expand some memory/string
9756 builtins and rely on libsanitizer's hooks. This allows us to avoid
9757 redundant checks and be sure that possible overflow will be detected
9758 by ASan. */
9760 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
9761 if ((flag_sanitize & SANITIZE_ADDRESS) && asan_intercepted_p (fcode))
9762 return expand_call (exp, target, ignore);
9764 /* When not optimizing, generate calls to library functions for a certain
9765 set of builtins. */
9766 if (!optimize
9767 && !called_as_built_in (fndecl)
9768 && fcode != BUILT_IN_FORK
9769 && fcode != BUILT_IN_EXECL
9770 && fcode != BUILT_IN_EXECV
9771 && fcode != BUILT_IN_EXECLP
9772 && fcode != BUILT_IN_EXECLE
9773 && fcode != BUILT_IN_EXECVP
9774 && fcode != BUILT_IN_EXECVE
9775 && fcode != BUILT_IN_CLEAR_CACHE
9776 && !ALLOCA_FUNCTION_CODE_P (fcode)
9777 && fcode != BUILT_IN_FREE)
9778 return expand_call (exp, target, ignore);
9780 /* The built-in function expanders test for target == const0_rtx
9781 to determine whether the function's result will be ignored. */
9782 if (ignore)
9783 target = const0_rtx;
9785 /* If the result of a pure or const built-in function is ignored, and
9786 none of its arguments are volatile, we can avoid expanding the
9787 built-in call and just evaluate the arguments for side-effects. */
9788 if (target == const0_rtx
9789 && ((flags = flags_from_decl_or_type (fndecl)) & (ECF_CONST | ECF_PURE))
9790 && !(flags & ECF_LOOPING_CONST_OR_PURE))
9792 bool volatilep = false;
9793 tree arg;
9794 call_expr_arg_iterator iter;
9796 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9797 if (TREE_THIS_VOLATILE (arg))
9799 volatilep = true;
9800 break;
9803 if (! volatilep)
9805 FOR_EACH_CALL_EXPR_ARG (arg, iter, exp)
9806 expand_expr (arg, const0_rtx, VOIDmode, EXPAND_NORMAL);
9807 return const0_rtx;
9811 switch (fcode)
9813 CASE_FLT_FN (BUILT_IN_FABS):
9814 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
9815 case BUILT_IN_FABSD32:
9816 case BUILT_IN_FABSD64:
9817 case BUILT_IN_FABSD128:
9818 target = expand_builtin_fabs (exp, target, subtarget);
9819 if (target)
9820 return target;
9821 break;
9823 CASE_FLT_FN (BUILT_IN_COPYSIGN):
9824 CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
9825 target = expand_builtin_copysign (exp, target, subtarget);
9826 if (target)
9827 return target;
9828 break;
9830 /* Just do a normal library call if we were unable to fold
9831 the values. */
9832 CASE_FLT_FN (BUILT_IN_CABS):
9833 break;
9835 CASE_FLT_FN (BUILT_IN_FMA):
9836 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
9837 target = expand_builtin_mathfn_ternary (exp, target, subtarget);
9838 if (target)
9839 return target;
9840 break;
9842 CASE_FLT_FN (BUILT_IN_ILOGB):
9843 if (! flag_unsafe_math_optimizations)
9844 break;
9845 gcc_fallthrough ();
9846 CASE_FLT_FN (BUILT_IN_ISINF):
9847 CASE_FLT_FN (BUILT_IN_FINITE):
9848 case BUILT_IN_ISFINITE:
9849 case BUILT_IN_ISNORMAL:
9850 target = expand_builtin_interclass_mathfn (exp, target);
9851 if (target)
9852 return target;
9853 break;
9855 CASE_FLT_FN (BUILT_IN_ICEIL):
9856 CASE_FLT_FN (BUILT_IN_LCEIL):
9857 CASE_FLT_FN (BUILT_IN_LLCEIL):
9858 CASE_FLT_FN (BUILT_IN_LFLOOR):
9859 CASE_FLT_FN (BUILT_IN_IFLOOR):
9860 CASE_FLT_FN (BUILT_IN_LLFLOOR):
9861 target = expand_builtin_int_roundingfn (exp, target);
9862 if (target)
9863 return target;
9864 break;
9866 CASE_FLT_FN (BUILT_IN_IRINT):
9867 CASE_FLT_FN (BUILT_IN_LRINT):
9868 CASE_FLT_FN (BUILT_IN_LLRINT):
9869 CASE_FLT_FN (BUILT_IN_IROUND):
9870 CASE_FLT_FN (BUILT_IN_LROUND):
9871 CASE_FLT_FN (BUILT_IN_LLROUND):
9872 target = expand_builtin_int_roundingfn_2 (exp, target);
9873 if (target)
9874 return target;
9875 break;
9877 CASE_FLT_FN (BUILT_IN_POWI):
9878 target = expand_builtin_powi (exp, target);
9879 if (target)
9880 return target;
9881 break;
9883 CASE_FLT_FN (BUILT_IN_CEXPI):
9884 target = expand_builtin_cexpi (exp, target);
9885 gcc_assert (target);
9886 return target;
9888 CASE_FLT_FN (BUILT_IN_SIN):
9889 CASE_FLT_FN (BUILT_IN_COS):
9890 if (! flag_unsafe_math_optimizations)
9891 break;
9892 target = expand_builtin_mathfn_3 (exp, target, subtarget);
9893 if (target)
9894 return target;
9895 break;
9897 CASE_FLT_FN (BUILT_IN_SINCOS):
9898 if (! flag_unsafe_math_optimizations)
9899 break;
9900 target = expand_builtin_sincos (exp);
9901 if (target)
9902 return target;
9903 break;
9905 case BUILT_IN_APPLY_ARGS:
9906 return expand_builtin_apply_args ();
9908 /* __builtin_apply (FUNCTION, ARGUMENTS, ARGSIZE) invokes
9909 FUNCTION with a copy of the parameters described by
9910 ARGUMENTS, and ARGSIZE. It returns a block of memory
9911 allocated on the stack into which is stored all the registers
9912 that might possibly be used for returning the result of a
9913 function. ARGUMENTS is the value returned by
9914 __builtin_apply_args. ARGSIZE is the number of bytes of
9915 arguments that must be copied. ??? How should this value be
9916 computed? We'll also need a safe worst case value for varargs
9917 functions. */
9918 case BUILT_IN_APPLY:
9919 if (!validate_arglist (exp, POINTER_TYPE,
9920 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE)
9921 && !validate_arglist (exp, REFERENCE_TYPE,
9922 POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
9923 return const0_rtx;
9924 else
9926 rtx ops[3];
9928 ops[0] = expand_normal (CALL_EXPR_ARG (exp, 0));
9929 ops[1] = expand_normal (CALL_EXPR_ARG (exp, 1));
9930 ops[2] = expand_normal (CALL_EXPR_ARG (exp, 2));
9932 return expand_builtin_apply (ops[0], ops[1], ops[2]);
9935 /* __builtin_return (RESULT) causes the function to return the
9936 value described by RESULT. RESULT is address of the block of
9937 memory returned by __builtin_apply. */
9938 case BUILT_IN_RETURN:
9939 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
9940 expand_builtin_return (expand_normal (CALL_EXPR_ARG (exp, 0)));
9941 return const0_rtx;
9943 case BUILT_IN_SAVEREGS:
9944 return expand_builtin_saveregs ();
9946 case BUILT_IN_VA_ARG_PACK:
9947 /* All valid uses of __builtin_va_arg_pack () are removed during
9948 inlining. */
9949 error ("%Kinvalid use of %<__builtin_va_arg_pack ()%>", exp);
9950 return const0_rtx;
9952 case BUILT_IN_VA_ARG_PACK_LEN:
9953 /* All valid uses of __builtin_va_arg_pack_len () are removed during
9954 inlining. */
9955 error ("%Kinvalid use of %<__builtin_va_arg_pack_len ()%>", exp);
9956 return const0_rtx;
9958 /* Return the address of the first anonymous stack arg. */
9959 case BUILT_IN_NEXT_ARG:
9960 if (fold_builtin_next_arg (exp, false))
9961 return const0_rtx;
9962 return expand_builtin_next_arg ();
9964 case BUILT_IN_CLEAR_CACHE:
9965 expand_builtin___clear_cache (exp);
9966 return const0_rtx;
9968 case BUILT_IN_CLASSIFY_TYPE:
9969 return expand_builtin_classify_type (exp);
9971 case BUILT_IN_CONSTANT_P:
9972 return const0_rtx;
9974 case BUILT_IN_FRAME_ADDRESS:
9975 case BUILT_IN_RETURN_ADDRESS:
9976 return expand_builtin_frame_address (fndecl, exp);
9978 /* Returns the address of the area where the structure is returned.
9979 0 otherwise. */
9980 case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
9981 if (call_expr_nargs (exp) != 0
9982 || ! AGGREGATE_TYPE_P (TREE_TYPE (TREE_TYPE (current_function_decl)))
9983 || !MEM_P (DECL_RTL (DECL_RESULT (current_function_decl))))
9984 return const0_rtx;
9985 else
9986 return XEXP (DECL_RTL (DECL_RESULT (current_function_decl)), 0);
9988 CASE_BUILT_IN_ALLOCA:
9989 target = expand_builtin_alloca (exp);
9990 if (target)
9991 return target;
9992 break;
9994 case BUILT_IN_ASAN_ALLOCAS_UNPOISON:
9995 return expand_asan_emit_allocas_unpoison (exp);
9997 case BUILT_IN_STACK_SAVE:
9998 return expand_stack_save ();
10000 case BUILT_IN_STACK_RESTORE:
10001 expand_stack_restore (CALL_EXPR_ARG (exp, 0));
10002 return const0_rtx;
10004 case BUILT_IN_BSWAP16:
10005 case BUILT_IN_BSWAP32:
10006 case BUILT_IN_BSWAP64:
10007 case BUILT_IN_BSWAP128:
10008 target = expand_builtin_bswap (target_mode, exp, target, subtarget);
10009 if (target)
10010 return target;
10011 break;
10013 CASE_INT_FN (BUILT_IN_FFS):
10014 target = expand_builtin_unop (target_mode, exp, target,
10015 subtarget, ffs_optab);
10016 if (target)
10017 return target;
10018 break;
10020 CASE_INT_FN (BUILT_IN_CLZ):
10021 target = expand_builtin_unop (target_mode, exp, target,
10022 subtarget, clz_optab);
10023 if (target)
10024 return target;
10025 break;
10027 CASE_INT_FN (BUILT_IN_CTZ):
10028 target = expand_builtin_unop (target_mode, exp, target,
10029 subtarget, ctz_optab);
10030 if (target)
10031 return target;
10032 break;
10034 CASE_INT_FN (BUILT_IN_CLRSB):
10035 target = expand_builtin_unop (target_mode, exp, target,
10036 subtarget, clrsb_optab);
10037 if (target)
10038 return target;
10039 break;
10041 CASE_INT_FN (BUILT_IN_POPCOUNT):
10042 target = expand_builtin_unop (target_mode, exp, target,
10043 subtarget, popcount_optab);
10044 if (target)
10045 return target;
10046 break;
10048 CASE_INT_FN (BUILT_IN_PARITY):
10049 target = expand_builtin_unop (target_mode, exp, target,
10050 subtarget, parity_optab);
10051 if (target)
10052 return target;
10053 break;
10055 case BUILT_IN_STRLEN:
10056 target = expand_builtin_strlen (exp, target, target_mode);
10057 if (target)
10058 return target;
10059 break;
10061 case BUILT_IN_STRNLEN:
10062 target = expand_builtin_strnlen (exp, target, target_mode);
10063 if (target)
10064 return target;
10065 break;
10067 case BUILT_IN_STRCAT:
10068 target = expand_builtin_strcat (exp);
10069 if (target)
10070 return target;
10071 break;
10073 case BUILT_IN_GETTEXT:
10074 case BUILT_IN_PUTS:
10075 case BUILT_IN_PUTS_UNLOCKED:
10076 case BUILT_IN_STRDUP:
10077 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10078 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
10079 break;
10081 case BUILT_IN_INDEX:
10082 case BUILT_IN_RINDEX:
10083 case BUILT_IN_STRCHR:
10084 case BUILT_IN_STRRCHR:
10085 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10086 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
10087 break;
10089 case BUILT_IN_FPUTS:
10090 case BUILT_IN_FPUTS_UNLOCKED:
10091 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
10092 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
10093 break;
10095 case BUILT_IN_STRNDUP:
10096 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10097 check_read_access (exp, CALL_EXPR_ARG (exp, 0), CALL_EXPR_ARG (exp, 1));
10098 break;
10100 case BUILT_IN_STRCASECMP:
10101 case BUILT_IN_STRPBRK:
10102 case BUILT_IN_STRSPN:
10103 case BUILT_IN_STRCSPN:
10104 case BUILT_IN_STRSTR:
10105 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
10107 check_read_access (exp, CALL_EXPR_ARG (exp, 0));
10108 check_read_access (exp, CALL_EXPR_ARG (exp, 1));
10110 break;
10112 case BUILT_IN_STRCPY:
10113 target = expand_builtin_strcpy (exp, target);
10114 if (target)
10115 return target;
10116 break;
10118 case BUILT_IN_STRNCAT:
10119 target = expand_builtin_strncat (exp, target);
10120 if (target)
10121 return target;
10122 break;
10124 case BUILT_IN_STRNCPY:
10125 target = expand_builtin_strncpy (exp, target);
10126 if (target)
10127 return target;
10128 break;
10130 case BUILT_IN_STPCPY:
10131 target = expand_builtin_stpcpy (exp, target, mode);
10132 if (target)
10133 return target;
10134 break;
10136 case BUILT_IN_STPNCPY:
10137 target = expand_builtin_stpncpy (exp, target);
10138 if (target)
10139 return target;
10140 break;
10142 case BUILT_IN_MEMCHR:
10143 target = expand_builtin_memchr (exp, target);
10144 if (target)
10145 return target;
10146 break;
10148 case BUILT_IN_MEMCPY:
10149 target = expand_builtin_memcpy (exp, target);
10150 if (target)
10151 return target;
10152 break;
10154 case BUILT_IN_MEMMOVE:
10155 target = expand_builtin_memmove (exp, target);
10156 if (target)
10157 return target;
10158 break;
10160 case BUILT_IN_MEMPCPY:
10161 target = expand_builtin_mempcpy (exp, target);
10162 if (target)
10163 return target;
10164 break;
10166 case BUILT_IN_MEMSET:
10167 target = expand_builtin_memset (exp, target, mode);
10168 if (target)
10169 return target;
10170 break;
10172 case BUILT_IN_BZERO:
10173 target = expand_builtin_bzero (exp);
10174 if (target)
10175 return target;
10176 break;
10178 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
10179 back to a BUILT_IN_STRCMP. Remember to delete the 3rd parameter
10180 when changing it to a strcmp call. */
10181 case BUILT_IN_STRCMP_EQ:
10182 target = expand_builtin_memcmp (exp, target, true);
10183 if (target)
10184 return target;
10186 /* Change this call back to a BUILT_IN_STRCMP. */
10187 TREE_OPERAND (exp, 1)
10188 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRCMP));
10190 /* Delete the last parameter. */
10191 unsigned int i;
10192 vec<tree, va_gc> *arg_vec;
10193 vec_alloc (arg_vec, 2);
10194 for (i = 0; i < 2; i++)
10195 arg_vec->quick_push (CALL_EXPR_ARG (exp, i));
10196 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), arg_vec);
10197 /* FALLTHROUGH */
10199 case BUILT_IN_STRCMP:
10200 target = expand_builtin_strcmp (exp, target);
10201 if (target)
10202 return target;
10203 break;
10205 /* Expand it as BUILT_IN_MEMCMP_EQ first. If not successful, change it
10206 back to a BUILT_IN_STRNCMP. */
10207 case BUILT_IN_STRNCMP_EQ:
10208 target = expand_builtin_memcmp (exp, target, true);
10209 if (target)
10210 return target;
10212 /* Change it back to a BUILT_IN_STRNCMP. */
10213 TREE_OPERAND (exp, 1)
10214 = build_fold_addr_expr (builtin_decl_explicit (BUILT_IN_STRNCMP));
10215 /* FALLTHROUGH */
10217 case BUILT_IN_STRNCMP:
10218 target = expand_builtin_strncmp (exp, target, mode);
10219 if (target)
10220 return target;
10221 break;
10223 case BUILT_IN_BCMP:
10224 case BUILT_IN_MEMCMP:
10225 case BUILT_IN_MEMCMP_EQ:
10226 target = expand_builtin_memcmp (exp, target, fcode == BUILT_IN_MEMCMP_EQ);
10227 if (target)
10228 return target;
10229 if (fcode == BUILT_IN_MEMCMP_EQ)
10231 tree newdecl = builtin_decl_explicit (BUILT_IN_MEMCMP);
10232 TREE_OPERAND (exp, 1) = build_fold_addr_expr (newdecl);
10234 break;
10236 case BUILT_IN_SETJMP:
10237 /* This should have been lowered to the builtins below. */
10238 gcc_unreachable ();
10240 case BUILT_IN_SETJMP_SETUP:
10241 /* __builtin_setjmp_setup is passed a pointer to an array of five words
10242 and the receiver label. */
10243 if (validate_arglist (exp, POINTER_TYPE, POINTER_TYPE, VOID_TYPE))
10245 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10246 VOIDmode, EXPAND_NORMAL);
10247 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 1), 0);
10248 rtx_insn *label_r = label_rtx (label);
10250 /* This is copied from the handling of non-local gotos. */
10251 expand_builtin_setjmp_setup (buf_addr, label_r);
10252 nonlocal_goto_handler_labels
10253 = gen_rtx_INSN_LIST (VOIDmode, label_r,
10254 nonlocal_goto_handler_labels);
10255 /* ??? Do not let expand_label treat us as such since we would
10256 not want to be both on the list of non-local labels and on
10257 the list of forced labels. */
10258 FORCED_LABEL (label) = 0;
10259 return const0_rtx;
10261 break;
10263 case BUILT_IN_SETJMP_RECEIVER:
10264 /* __builtin_setjmp_receiver is passed the receiver label. */
10265 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10267 tree label = TREE_OPERAND (CALL_EXPR_ARG (exp, 0), 0);
10268 rtx_insn *label_r = label_rtx (label);
10270 expand_builtin_setjmp_receiver (label_r);
10271 return const0_rtx;
10273 break;
10275 /* __builtin_longjmp is passed a pointer to an array of five words.
10276 It's similar to the C library longjmp function but works with
10277 __builtin_setjmp above. */
10278 case BUILT_IN_LONGJMP:
10279 if (validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
10281 rtx buf_addr = expand_expr (CALL_EXPR_ARG (exp, 0), subtarget,
10282 VOIDmode, EXPAND_NORMAL);
10283 rtx value = expand_normal (CALL_EXPR_ARG (exp, 1));
10285 if (value != const1_rtx)
10287 error ("%<__builtin_longjmp%> second argument must be 1");
10288 return const0_rtx;
10291 expand_builtin_longjmp (buf_addr, value);
10292 return const0_rtx;
10294 break;
10296 case BUILT_IN_NONLOCAL_GOTO:
10297 target = expand_builtin_nonlocal_goto (exp);
10298 if (target)
10299 return target;
10300 break;
10302 /* This updates the setjmp buffer that is its argument with the value
10303 of the current stack pointer. */
10304 case BUILT_IN_UPDATE_SETJMP_BUF:
10305 if (validate_arglist (exp, POINTER_TYPE, VOID_TYPE))
10307 rtx buf_addr
10308 = expand_normal (CALL_EXPR_ARG (exp, 0));
10310 expand_builtin_update_setjmp_buf (buf_addr);
10311 return const0_rtx;
10313 break;
10315 case BUILT_IN_TRAP:
10316 expand_builtin_trap ();
10317 return const0_rtx;
10319 case BUILT_IN_UNREACHABLE:
10320 expand_builtin_unreachable ();
10321 return const0_rtx;
10323 CASE_FLT_FN (BUILT_IN_SIGNBIT):
10324 case BUILT_IN_SIGNBITD32:
10325 case BUILT_IN_SIGNBITD64:
10326 case BUILT_IN_SIGNBITD128:
10327 target = expand_builtin_signbit (exp, target);
10328 if (target)
10329 return target;
10330 break;
10332 /* Various hooks for the DWARF 2 __throw routine. */
10333 case BUILT_IN_UNWIND_INIT:
10334 expand_builtin_unwind_init ();
10335 return const0_rtx;
10336 case BUILT_IN_DWARF_CFA:
10337 return virtual_cfa_rtx;
10338 #ifdef DWARF2_UNWIND_INFO
10339 case BUILT_IN_DWARF_SP_COLUMN:
10340 return expand_builtin_dwarf_sp_column ();
10341 case BUILT_IN_INIT_DWARF_REG_SIZES:
10342 expand_builtin_init_dwarf_reg_sizes (CALL_EXPR_ARG (exp, 0));
10343 return const0_rtx;
10344 #endif
10345 case BUILT_IN_FROB_RETURN_ADDR:
10346 return expand_builtin_frob_return_addr (CALL_EXPR_ARG (exp, 0));
10347 case BUILT_IN_EXTRACT_RETURN_ADDR:
10348 return expand_builtin_extract_return_addr (CALL_EXPR_ARG (exp, 0));
10349 case BUILT_IN_EH_RETURN:
10350 expand_builtin_eh_return (CALL_EXPR_ARG (exp, 0),
10351 CALL_EXPR_ARG (exp, 1));
10352 return const0_rtx;
10353 case BUILT_IN_EH_RETURN_DATA_REGNO:
10354 return expand_builtin_eh_return_data_regno (exp);
10355 case BUILT_IN_EXTEND_POINTER:
10356 return expand_builtin_extend_pointer (CALL_EXPR_ARG (exp, 0));
10357 case BUILT_IN_EH_POINTER:
10358 return expand_builtin_eh_pointer (exp);
10359 case BUILT_IN_EH_FILTER:
10360 return expand_builtin_eh_filter (exp);
10361 case BUILT_IN_EH_COPY_VALUES:
10362 return expand_builtin_eh_copy_values (exp);
10364 case BUILT_IN_VA_START:
10365 return expand_builtin_va_start (exp);
10366 case BUILT_IN_VA_END:
10367 return expand_builtin_va_end (exp);
10368 case BUILT_IN_VA_COPY:
10369 return expand_builtin_va_copy (exp);
10370 case BUILT_IN_EXPECT:
10371 return expand_builtin_expect (exp, target);
10372 case BUILT_IN_EXPECT_WITH_PROBABILITY:
10373 return expand_builtin_expect_with_probability (exp, target);
10374 case BUILT_IN_ASSUME_ALIGNED:
10375 return expand_builtin_assume_aligned (exp, target);
10376 case BUILT_IN_PREFETCH:
10377 expand_builtin_prefetch (exp);
10378 return const0_rtx;
10380 case BUILT_IN_INIT_TRAMPOLINE:
10381 return expand_builtin_init_trampoline (exp, true);
10382 case BUILT_IN_INIT_HEAP_TRAMPOLINE:
10383 return expand_builtin_init_trampoline (exp, false);
10384 case BUILT_IN_ADJUST_TRAMPOLINE:
10385 return expand_builtin_adjust_trampoline (exp);
10387 case BUILT_IN_INIT_DESCRIPTOR:
10388 return expand_builtin_init_descriptor (exp);
10389 case BUILT_IN_ADJUST_DESCRIPTOR:
10390 return expand_builtin_adjust_descriptor (exp);
10392 case BUILT_IN_FORK:
10393 case BUILT_IN_EXECL:
10394 case BUILT_IN_EXECV:
10395 case BUILT_IN_EXECLP:
10396 case BUILT_IN_EXECLE:
10397 case BUILT_IN_EXECVP:
10398 case BUILT_IN_EXECVE:
10399 target = expand_builtin_fork_or_exec (fndecl, exp, target, ignore);
10400 if (target)
10401 return target;
10402 break;
10404 case BUILT_IN_SYNC_FETCH_AND_ADD_1:
10405 case BUILT_IN_SYNC_FETCH_AND_ADD_2:
10406 case BUILT_IN_SYNC_FETCH_AND_ADD_4:
10407 case BUILT_IN_SYNC_FETCH_AND_ADD_8:
10408 case BUILT_IN_SYNC_FETCH_AND_ADD_16:
10409 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_ADD_1);
10410 target = expand_builtin_sync_operation (mode, exp, PLUS, false, target);
10411 if (target)
10412 return target;
10413 break;
10415 case BUILT_IN_SYNC_FETCH_AND_SUB_1:
10416 case BUILT_IN_SYNC_FETCH_AND_SUB_2:
10417 case BUILT_IN_SYNC_FETCH_AND_SUB_4:
10418 case BUILT_IN_SYNC_FETCH_AND_SUB_8:
10419 case BUILT_IN_SYNC_FETCH_AND_SUB_16:
10420 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_SUB_1);
10421 target = expand_builtin_sync_operation (mode, exp, MINUS, false, target);
10422 if (target)
10423 return target;
10424 break;
10426 case BUILT_IN_SYNC_FETCH_AND_OR_1:
10427 case BUILT_IN_SYNC_FETCH_AND_OR_2:
10428 case BUILT_IN_SYNC_FETCH_AND_OR_4:
10429 case BUILT_IN_SYNC_FETCH_AND_OR_8:
10430 case BUILT_IN_SYNC_FETCH_AND_OR_16:
10431 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_OR_1);
10432 target = expand_builtin_sync_operation (mode, exp, IOR, false, target);
10433 if (target)
10434 return target;
10435 break;
10437 case BUILT_IN_SYNC_FETCH_AND_AND_1:
10438 case BUILT_IN_SYNC_FETCH_AND_AND_2:
10439 case BUILT_IN_SYNC_FETCH_AND_AND_4:
10440 case BUILT_IN_SYNC_FETCH_AND_AND_8:
10441 case BUILT_IN_SYNC_FETCH_AND_AND_16:
10442 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_AND_1);
10443 target = expand_builtin_sync_operation (mode, exp, AND, false, target);
10444 if (target)
10445 return target;
10446 break;
10448 case BUILT_IN_SYNC_FETCH_AND_XOR_1:
10449 case BUILT_IN_SYNC_FETCH_AND_XOR_2:
10450 case BUILT_IN_SYNC_FETCH_AND_XOR_4:
10451 case BUILT_IN_SYNC_FETCH_AND_XOR_8:
10452 case BUILT_IN_SYNC_FETCH_AND_XOR_16:
10453 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_XOR_1);
10454 target = expand_builtin_sync_operation (mode, exp, XOR, false, target);
10455 if (target)
10456 return target;
10457 break;
10459 case BUILT_IN_SYNC_FETCH_AND_NAND_1:
10460 case BUILT_IN_SYNC_FETCH_AND_NAND_2:
10461 case BUILT_IN_SYNC_FETCH_AND_NAND_4:
10462 case BUILT_IN_SYNC_FETCH_AND_NAND_8:
10463 case BUILT_IN_SYNC_FETCH_AND_NAND_16:
10464 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_FETCH_AND_NAND_1);
10465 target = expand_builtin_sync_operation (mode, exp, NOT, false, target);
10466 if (target)
10467 return target;
10468 break;
10470 case BUILT_IN_SYNC_ADD_AND_FETCH_1:
10471 case BUILT_IN_SYNC_ADD_AND_FETCH_2:
10472 case BUILT_IN_SYNC_ADD_AND_FETCH_4:
10473 case BUILT_IN_SYNC_ADD_AND_FETCH_8:
10474 case BUILT_IN_SYNC_ADD_AND_FETCH_16:
10475 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_ADD_AND_FETCH_1);
10476 target = expand_builtin_sync_operation (mode, exp, PLUS, true, target);
10477 if (target)
10478 return target;
10479 break;
10481 case BUILT_IN_SYNC_SUB_AND_FETCH_1:
10482 case BUILT_IN_SYNC_SUB_AND_FETCH_2:
10483 case BUILT_IN_SYNC_SUB_AND_FETCH_4:
10484 case BUILT_IN_SYNC_SUB_AND_FETCH_8:
10485 case BUILT_IN_SYNC_SUB_AND_FETCH_16:
10486 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_SUB_AND_FETCH_1);
10487 target = expand_builtin_sync_operation (mode, exp, MINUS, true, target);
10488 if (target)
10489 return target;
10490 break;
10492 case BUILT_IN_SYNC_OR_AND_FETCH_1:
10493 case BUILT_IN_SYNC_OR_AND_FETCH_2:
10494 case BUILT_IN_SYNC_OR_AND_FETCH_4:
10495 case BUILT_IN_SYNC_OR_AND_FETCH_8:
10496 case BUILT_IN_SYNC_OR_AND_FETCH_16:
10497 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_OR_AND_FETCH_1);
10498 target = expand_builtin_sync_operation (mode, exp, IOR, true, target);
10499 if (target)
10500 return target;
10501 break;
10503 case BUILT_IN_SYNC_AND_AND_FETCH_1:
10504 case BUILT_IN_SYNC_AND_AND_FETCH_2:
10505 case BUILT_IN_SYNC_AND_AND_FETCH_4:
10506 case BUILT_IN_SYNC_AND_AND_FETCH_8:
10507 case BUILT_IN_SYNC_AND_AND_FETCH_16:
10508 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_AND_AND_FETCH_1);
10509 target = expand_builtin_sync_operation (mode, exp, AND, true, target);
10510 if (target)
10511 return target;
10512 break;
10514 case BUILT_IN_SYNC_XOR_AND_FETCH_1:
10515 case BUILT_IN_SYNC_XOR_AND_FETCH_2:
10516 case BUILT_IN_SYNC_XOR_AND_FETCH_4:
10517 case BUILT_IN_SYNC_XOR_AND_FETCH_8:
10518 case BUILT_IN_SYNC_XOR_AND_FETCH_16:
10519 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_XOR_AND_FETCH_1);
10520 target = expand_builtin_sync_operation (mode, exp, XOR, true, target);
10521 if (target)
10522 return target;
10523 break;
10525 case BUILT_IN_SYNC_NAND_AND_FETCH_1:
10526 case BUILT_IN_SYNC_NAND_AND_FETCH_2:
10527 case BUILT_IN_SYNC_NAND_AND_FETCH_4:
10528 case BUILT_IN_SYNC_NAND_AND_FETCH_8:
10529 case BUILT_IN_SYNC_NAND_AND_FETCH_16:
10530 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_NAND_AND_FETCH_1);
10531 target = expand_builtin_sync_operation (mode, exp, NOT, true, target);
10532 if (target)
10533 return target;
10534 break;
10536 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1:
10537 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_2:
10538 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_4:
10539 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_8:
10540 case BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_16:
10541 if (mode == VOIDmode)
10542 mode = TYPE_MODE (boolean_type_node);
10543 if (!target || !register_operand (target, mode))
10544 target = gen_reg_rtx (mode);
10546 mode = get_builtin_sync_mode
10547 (fcode - BUILT_IN_SYNC_BOOL_COMPARE_AND_SWAP_1);
10548 target = expand_builtin_compare_and_swap (mode, exp, true, target);
10549 if (target)
10550 return target;
10551 break;
10553 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1:
10554 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_2:
10555 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_4:
10556 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_8:
10557 case BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_16:
10558 mode = get_builtin_sync_mode
10559 (fcode - BUILT_IN_SYNC_VAL_COMPARE_AND_SWAP_1);
10560 target = expand_builtin_compare_and_swap (mode, exp, false, target);
10561 if (target)
10562 return target;
10563 break;
10565 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_1:
10566 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_2:
10567 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_4:
10568 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_8:
10569 case BUILT_IN_SYNC_LOCK_TEST_AND_SET_16:
10570 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_TEST_AND_SET_1);
10571 target = expand_builtin_sync_lock_test_and_set (mode, exp, target);
10572 if (target)
10573 return target;
10574 break;
10576 case BUILT_IN_SYNC_LOCK_RELEASE_1:
10577 case BUILT_IN_SYNC_LOCK_RELEASE_2:
10578 case BUILT_IN_SYNC_LOCK_RELEASE_4:
10579 case BUILT_IN_SYNC_LOCK_RELEASE_8:
10580 case BUILT_IN_SYNC_LOCK_RELEASE_16:
10581 mode = get_builtin_sync_mode (fcode - BUILT_IN_SYNC_LOCK_RELEASE_1);
10582 expand_builtin_sync_lock_release (mode, exp);
10583 return const0_rtx;
10585 case BUILT_IN_SYNC_SYNCHRONIZE:
10586 expand_builtin_sync_synchronize ();
10587 return const0_rtx;
10589 case BUILT_IN_ATOMIC_EXCHANGE_1:
10590 case BUILT_IN_ATOMIC_EXCHANGE_2:
10591 case BUILT_IN_ATOMIC_EXCHANGE_4:
10592 case BUILT_IN_ATOMIC_EXCHANGE_8:
10593 case BUILT_IN_ATOMIC_EXCHANGE_16:
10594 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_EXCHANGE_1);
10595 target = expand_builtin_atomic_exchange (mode, exp, target);
10596 if (target)
10597 return target;
10598 break;
10600 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1:
10601 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_2:
10602 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_4:
10603 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_8:
10604 case BUILT_IN_ATOMIC_COMPARE_EXCHANGE_16:
10606 unsigned int nargs, z;
10607 vec<tree, va_gc> *vec;
10609 mode =
10610 get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_COMPARE_EXCHANGE_1);
10611 target = expand_builtin_atomic_compare_exchange (mode, exp, target);
10612 if (target)
10613 return target;
10615 /* If this is turned into an external library call, the weak parameter
10616 must be dropped to match the expected parameter list. */
10617 nargs = call_expr_nargs (exp);
10618 vec_alloc (vec, nargs - 1);
10619 for (z = 0; z < 3; z++)
10620 vec->quick_push (CALL_EXPR_ARG (exp, z));
10621 /* Skip the boolean weak parameter. */
10622 for (z = 4; z < 6; z++)
10623 vec->quick_push (CALL_EXPR_ARG (exp, z));
10624 exp = build_call_vec (TREE_TYPE (exp), CALL_EXPR_FN (exp), vec);
10625 break;
10628 case BUILT_IN_ATOMIC_LOAD_1:
10629 case BUILT_IN_ATOMIC_LOAD_2:
10630 case BUILT_IN_ATOMIC_LOAD_4:
10631 case BUILT_IN_ATOMIC_LOAD_8:
10632 case BUILT_IN_ATOMIC_LOAD_16:
10633 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_LOAD_1);
10634 target = expand_builtin_atomic_load (mode, exp, target);
10635 if (target)
10636 return target;
10637 break;
10639 case BUILT_IN_ATOMIC_STORE_1:
10640 case BUILT_IN_ATOMIC_STORE_2:
10641 case BUILT_IN_ATOMIC_STORE_4:
10642 case BUILT_IN_ATOMIC_STORE_8:
10643 case BUILT_IN_ATOMIC_STORE_16:
10644 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_STORE_1);
10645 target = expand_builtin_atomic_store (mode, exp);
10646 if (target)
10647 return const0_rtx;
10648 break;
10650 case BUILT_IN_ATOMIC_ADD_FETCH_1:
10651 case BUILT_IN_ATOMIC_ADD_FETCH_2:
10652 case BUILT_IN_ATOMIC_ADD_FETCH_4:
10653 case BUILT_IN_ATOMIC_ADD_FETCH_8:
10654 case BUILT_IN_ATOMIC_ADD_FETCH_16:
10656 enum built_in_function lib;
10657 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1);
10658 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_ADD_1 +
10659 (fcode - BUILT_IN_ATOMIC_ADD_FETCH_1));
10660 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, true,
10661 ignore, lib);
10662 if (target)
10663 return target;
10664 break;
10666 case BUILT_IN_ATOMIC_SUB_FETCH_1:
10667 case BUILT_IN_ATOMIC_SUB_FETCH_2:
10668 case BUILT_IN_ATOMIC_SUB_FETCH_4:
10669 case BUILT_IN_ATOMIC_SUB_FETCH_8:
10670 case BUILT_IN_ATOMIC_SUB_FETCH_16:
10672 enum built_in_function lib;
10673 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1);
10674 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_SUB_1 +
10675 (fcode - BUILT_IN_ATOMIC_SUB_FETCH_1));
10676 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, true,
10677 ignore, lib);
10678 if (target)
10679 return target;
10680 break;
10682 case BUILT_IN_ATOMIC_AND_FETCH_1:
10683 case BUILT_IN_ATOMIC_AND_FETCH_2:
10684 case BUILT_IN_ATOMIC_AND_FETCH_4:
10685 case BUILT_IN_ATOMIC_AND_FETCH_8:
10686 case BUILT_IN_ATOMIC_AND_FETCH_16:
10688 enum built_in_function lib;
10689 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_AND_FETCH_1);
10690 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_AND_1 +
10691 (fcode - BUILT_IN_ATOMIC_AND_FETCH_1));
10692 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, true,
10693 ignore, lib);
10694 if (target)
10695 return target;
10696 break;
10698 case BUILT_IN_ATOMIC_NAND_FETCH_1:
10699 case BUILT_IN_ATOMIC_NAND_FETCH_2:
10700 case BUILT_IN_ATOMIC_NAND_FETCH_4:
10701 case BUILT_IN_ATOMIC_NAND_FETCH_8:
10702 case BUILT_IN_ATOMIC_NAND_FETCH_16:
10704 enum built_in_function lib;
10705 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1);
10706 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_NAND_1 +
10707 (fcode - BUILT_IN_ATOMIC_NAND_FETCH_1));
10708 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, true,
10709 ignore, lib);
10710 if (target)
10711 return target;
10712 break;
10714 case BUILT_IN_ATOMIC_XOR_FETCH_1:
10715 case BUILT_IN_ATOMIC_XOR_FETCH_2:
10716 case BUILT_IN_ATOMIC_XOR_FETCH_4:
10717 case BUILT_IN_ATOMIC_XOR_FETCH_8:
10718 case BUILT_IN_ATOMIC_XOR_FETCH_16:
10720 enum built_in_function lib;
10721 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1);
10722 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_XOR_1 +
10723 (fcode - BUILT_IN_ATOMIC_XOR_FETCH_1));
10724 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, true,
10725 ignore, lib);
10726 if (target)
10727 return target;
10728 break;
10730 case BUILT_IN_ATOMIC_OR_FETCH_1:
10731 case BUILT_IN_ATOMIC_OR_FETCH_2:
10732 case BUILT_IN_ATOMIC_OR_FETCH_4:
10733 case BUILT_IN_ATOMIC_OR_FETCH_8:
10734 case BUILT_IN_ATOMIC_OR_FETCH_16:
10736 enum built_in_function lib;
10737 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_OR_FETCH_1);
10738 lib = (enum built_in_function)((int)BUILT_IN_ATOMIC_FETCH_OR_1 +
10739 (fcode - BUILT_IN_ATOMIC_OR_FETCH_1));
10740 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, true,
10741 ignore, lib);
10742 if (target)
10743 return target;
10744 break;
10746 case BUILT_IN_ATOMIC_FETCH_ADD_1:
10747 case BUILT_IN_ATOMIC_FETCH_ADD_2:
10748 case BUILT_IN_ATOMIC_FETCH_ADD_4:
10749 case BUILT_IN_ATOMIC_FETCH_ADD_8:
10750 case BUILT_IN_ATOMIC_FETCH_ADD_16:
10751 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_ADD_1);
10752 target = expand_builtin_atomic_fetch_op (mode, exp, target, PLUS, false,
10753 ignore, BUILT_IN_NONE);
10754 if (target)
10755 return target;
10756 break;
10758 case BUILT_IN_ATOMIC_FETCH_SUB_1:
10759 case BUILT_IN_ATOMIC_FETCH_SUB_2:
10760 case BUILT_IN_ATOMIC_FETCH_SUB_4:
10761 case BUILT_IN_ATOMIC_FETCH_SUB_8:
10762 case BUILT_IN_ATOMIC_FETCH_SUB_16:
10763 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_SUB_1);
10764 target = expand_builtin_atomic_fetch_op (mode, exp, target, MINUS, false,
10765 ignore, BUILT_IN_NONE);
10766 if (target)
10767 return target;
10768 break;
10770 case BUILT_IN_ATOMIC_FETCH_AND_1:
10771 case BUILT_IN_ATOMIC_FETCH_AND_2:
10772 case BUILT_IN_ATOMIC_FETCH_AND_4:
10773 case BUILT_IN_ATOMIC_FETCH_AND_8:
10774 case BUILT_IN_ATOMIC_FETCH_AND_16:
10775 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_AND_1);
10776 target = expand_builtin_atomic_fetch_op (mode, exp, target, AND, false,
10777 ignore, BUILT_IN_NONE);
10778 if (target)
10779 return target;
10780 break;
10782 case BUILT_IN_ATOMIC_FETCH_NAND_1:
10783 case BUILT_IN_ATOMIC_FETCH_NAND_2:
10784 case BUILT_IN_ATOMIC_FETCH_NAND_4:
10785 case BUILT_IN_ATOMIC_FETCH_NAND_8:
10786 case BUILT_IN_ATOMIC_FETCH_NAND_16:
10787 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_NAND_1);
10788 target = expand_builtin_atomic_fetch_op (mode, exp, target, NOT, false,
10789 ignore, BUILT_IN_NONE);
10790 if (target)
10791 return target;
10792 break;
10794 case BUILT_IN_ATOMIC_FETCH_XOR_1:
10795 case BUILT_IN_ATOMIC_FETCH_XOR_2:
10796 case BUILT_IN_ATOMIC_FETCH_XOR_4:
10797 case BUILT_IN_ATOMIC_FETCH_XOR_8:
10798 case BUILT_IN_ATOMIC_FETCH_XOR_16:
10799 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_XOR_1);
10800 target = expand_builtin_atomic_fetch_op (mode, exp, target, XOR, false,
10801 ignore, BUILT_IN_NONE);
10802 if (target)
10803 return target;
10804 break;
10806 case BUILT_IN_ATOMIC_FETCH_OR_1:
10807 case BUILT_IN_ATOMIC_FETCH_OR_2:
10808 case BUILT_IN_ATOMIC_FETCH_OR_4:
10809 case BUILT_IN_ATOMIC_FETCH_OR_8:
10810 case BUILT_IN_ATOMIC_FETCH_OR_16:
10811 mode = get_builtin_sync_mode (fcode - BUILT_IN_ATOMIC_FETCH_OR_1);
10812 target = expand_builtin_atomic_fetch_op (mode, exp, target, IOR, false,
10813 ignore, BUILT_IN_NONE);
10814 if (target)
10815 return target;
10816 break;
10818 case BUILT_IN_ATOMIC_TEST_AND_SET:
10819 return expand_builtin_atomic_test_and_set (exp, target);
10821 case BUILT_IN_ATOMIC_CLEAR:
10822 return expand_builtin_atomic_clear (exp);
10824 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
10825 return expand_builtin_atomic_always_lock_free (exp);
10827 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
10828 target = expand_builtin_atomic_is_lock_free (exp);
10829 if (target)
10830 return target;
10831 break;
10833 case BUILT_IN_ATOMIC_THREAD_FENCE:
10834 expand_builtin_atomic_thread_fence (exp);
10835 return const0_rtx;
10837 case BUILT_IN_ATOMIC_SIGNAL_FENCE:
10838 expand_builtin_atomic_signal_fence (exp);
10839 return const0_rtx;
10841 case BUILT_IN_OBJECT_SIZE:
10842 return expand_builtin_object_size (exp);
10844 case BUILT_IN_MEMCPY_CHK:
10845 case BUILT_IN_MEMPCPY_CHK:
10846 case BUILT_IN_MEMMOVE_CHK:
10847 case BUILT_IN_MEMSET_CHK:
10848 target = expand_builtin_memory_chk (exp, target, mode, fcode);
10849 if (target)
10850 return target;
10851 break;
10853 case BUILT_IN_STRCPY_CHK:
10854 case BUILT_IN_STPCPY_CHK:
10855 case BUILT_IN_STRNCPY_CHK:
10856 case BUILT_IN_STPNCPY_CHK:
10857 case BUILT_IN_STRCAT_CHK:
10858 case BUILT_IN_STRNCAT_CHK:
10859 case BUILT_IN_SNPRINTF_CHK:
10860 case BUILT_IN_VSNPRINTF_CHK:
10861 maybe_emit_chk_warning (exp, fcode);
10862 break;
10864 case BUILT_IN_SPRINTF_CHK:
10865 case BUILT_IN_VSPRINTF_CHK:
10866 maybe_emit_sprintf_chk_warning (exp, fcode);
10867 break;
10869 case BUILT_IN_THREAD_POINTER:
10870 return expand_builtin_thread_pointer (exp, target);
10872 case BUILT_IN_SET_THREAD_POINTER:
10873 expand_builtin_set_thread_pointer (exp);
10874 return const0_rtx;
10876 case BUILT_IN_ACC_ON_DEVICE:
10877 /* Do a library call if we failed to expand the builtin when
10878 folding. */
10879 break;
10881 case BUILT_IN_GOACC_PARLEVEL_ID:
10882 case BUILT_IN_GOACC_PARLEVEL_SIZE:
10883 return expand_builtin_goacc_parlevel_id_size (exp, target, ignore);
10885 case BUILT_IN_SPECULATION_SAFE_VALUE_PTR:
10886 return expand_speculation_safe_value (VOIDmode, exp, target, ignore);
10888 case BUILT_IN_SPECULATION_SAFE_VALUE_1:
10889 case BUILT_IN_SPECULATION_SAFE_VALUE_2:
10890 case BUILT_IN_SPECULATION_SAFE_VALUE_4:
10891 case BUILT_IN_SPECULATION_SAFE_VALUE_8:
10892 case BUILT_IN_SPECULATION_SAFE_VALUE_16:
10893 mode = get_builtin_sync_mode (fcode - BUILT_IN_SPECULATION_SAFE_VALUE_1);
10894 return expand_speculation_safe_value (mode, exp, target, ignore);
10896 default: /* Just do a library call for an unknown builtin. */
10897 break;
10900 /* The switch statement above can drop through to cause the function
10901 to be called normally. */
10902 return expand_call (exp, target, ignore);
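/* Editorial aside -- a standalone sketch in GNU C, not part of builtins.c:
   the *_FETCH cases in the switch above pass the matching FETCH_* builtin
   as a library fallback, because an op-fetch result can always be recovered
   from a fetch-op by reapplying the operation to the returned value.
   For add:  */

static inline int
add_fetch_via_fetch_add (int *p, int v, int memorder)
{
  /* __atomic_add_fetch (p, v, memorder) computes the same value.  */
  return __atomic_fetch_add (p, v, memorder) + v;
}

/* NAND is the asymmetric case: the new value is ~(old & v), which is why
   the NAND_FETCH cases pass NOT as the operation code.  */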
10905 /* Determine whether a tree node represents a call to a built-in
10906 function. If the tree T is a call to a built-in function with
10907 the right number of arguments of the appropriate types, return
10908 the DECL_FUNCTION_CODE of the call, e.g. BUILT_IN_SQRT.
10909 Otherwise the return value is END_BUILTINS. */
10911 enum built_in_function
10912 builtin_mathfn_code (const_tree t)
10914 const_tree fndecl, arg, parmlist;
10915 const_tree argtype, parmtype;
10916 const_call_expr_arg_iterator iter;
10918 if (TREE_CODE (t) != CALL_EXPR)
10919 return END_BUILTINS;
10921 fndecl = get_callee_fndecl (t);
10922 if (fndecl == NULL_TREE || !fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
10923 return END_BUILTINS;
10925 parmlist = TYPE_ARG_TYPES (TREE_TYPE (fndecl));
10926 init_const_call_expr_arg_iterator (t, &iter);
10927 for (; parmlist; parmlist = TREE_CHAIN (parmlist))
10929 /* If a function doesn't take a variable number of arguments,
10930 the last element in the list will have type `void'. */
10931 parmtype = TREE_VALUE (parmlist);
10932 if (VOID_TYPE_P (parmtype))
10934 if (more_const_call_expr_args_p (&iter))
10935 return END_BUILTINS;
10936 return DECL_FUNCTION_CODE (fndecl);
10939 if (! more_const_call_expr_args_p (&iter))
10940 return END_BUILTINS;
10942 arg = next_const_call_expr_arg (&iter);
10943 argtype = TREE_TYPE (arg);
10945 if (SCALAR_FLOAT_TYPE_P (parmtype))
10947 if (! SCALAR_FLOAT_TYPE_P (argtype))
10948 return END_BUILTINS;
10950 else if (COMPLEX_FLOAT_TYPE_P (parmtype))
10952 if (! COMPLEX_FLOAT_TYPE_P (argtype))
10953 return END_BUILTINS;
10955 else if (POINTER_TYPE_P (parmtype))
10957 if (! POINTER_TYPE_P (argtype))
10958 return END_BUILTINS;
10960 else if (INTEGRAL_TYPE_P (parmtype))
10962 if (! INTEGRAL_TYPE_P (argtype))
10963 return END_BUILTINS;
10965 else
10966 return END_BUILTINS;
10969 /* Variable-length argument list. */
10970 return DECL_FUNCTION_CODE (fndecl);
10973 /* Fold a call to __builtin_constant_p, if we know its argument ARG will
10974 evaluate to a constant. */
10976 static tree
10977 fold_builtin_constant_p (tree arg)
10979 /* We return 1 for a numeric type that's known to be a constant
10980 value at compile-time or for an aggregate type that's a
10981 literal constant. */
10982 STRIP_NOPS (arg);
10984 /* If we know this is a constant, return the constant 1. */
10985 if (CONSTANT_CLASS_P (arg)
10986 || (TREE_CODE (arg) == CONSTRUCTOR
10987 && TREE_CONSTANT (arg)))
10988 return integer_one_node;
10989 if (TREE_CODE (arg) == ADDR_EXPR)
10991 tree op = TREE_OPERAND (arg, 0);
10992 if (TREE_CODE (op) == STRING_CST
10993 || (TREE_CODE (op) == ARRAY_REF
10994 && integer_zerop (TREE_OPERAND (op, 1))
10995 && TREE_CODE (TREE_OPERAND (op, 0)) == STRING_CST))
10996 return integer_one_node;
10999 /* If this expression has side effects, show we don't know it to be a
11000 constant. Likewise if it's a pointer or aggregate type since in
11001 those cases we only want literals, as those are only optimized
11002 when generating RTL, not later.
11003 And finally, if we are compiling an initializer, not code, we
11004 need to return a definite result now; there's not going to be any
11005 more optimization done. */
11006 if (TREE_SIDE_EFFECTS (arg)
11007 || AGGREGATE_TYPE_P (TREE_TYPE (arg))
11008 || POINTER_TYPE_P (TREE_TYPE (arg))
11009 || cfun == 0
11010 || folding_initializer
11011 || force_folding_builtin_constant_p)
11012 return integer_zero_node;
11014 return NULL_TREE;
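/* Editorial sketch (not part of builtins.c) of the rules above as seen
   from user code; results for non-constant operands depend on the
   optimization level, as noted in fold_builtin_1 below.  */

static int
constant_p_examples (int x)
{
  int a = __builtin_constant_p (42);      /* literal: folds to 1        */
  int b = __builtin_constant_p ("abc");   /* ADDR_EXPR of STRING_CST: 1 */
  int c = __builtin_constant_p (x);       /* 1 only if the optimizers
					     prove X constant           */
  int d = __builtin_constant_p (x++);     /* side effects: folds to 0   */
  return a + b + c + d;
}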
11017 /* Create builtin_expect or builtin_expect_with_probability
11018 with PRED and EXPECTED as its arguments and return it as a truthvalue.
11019 The Fortran FE can also produce builtin_expect with PREDICTOR as its
11020 third argument; builtin_expect_with_probability instead uses its third
11021 argument as the PROBABILITY value. */
11023 static tree
11024 build_builtin_expect_predicate (location_t loc, tree pred, tree expected,
11025 tree predictor, tree probability)
11027 tree fn, arg_types, pred_type, expected_type, call_expr, ret_type;
11029 fn = builtin_decl_explicit (probability == NULL_TREE ? BUILT_IN_EXPECT
11030 : BUILT_IN_EXPECT_WITH_PROBABILITY);
11031 arg_types = TYPE_ARG_TYPES (TREE_TYPE (fn));
11032 ret_type = TREE_TYPE (TREE_TYPE (fn));
11033 pred_type = TREE_VALUE (arg_types);
11034 expected_type = TREE_VALUE (TREE_CHAIN (arg_types));
11036 pred = fold_convert_loc (loc, pred_type, pred);
11037 expected = fold_convert_loc (loc, expected_type, expected);
11039 if (probability)
11040 call_expr = build_call_expr_loc (loc, fn, 3, pred, expected, probability);
11041 else
11042 call_expr = build_call_expr_loc (loc, fn, predictor ? 3 : 2, pred, expected,
11043 predictor);
11045 return build2 (NE_EXPR, TREE_TYPE (pred), call_expr,
11046 build_int_cst (ret_type, 0));
11049 /* Fold a call to builtin_expect with arguments ARG0, ARG1, ARG2, ARG3. Return
11050 NULL_TREE if no simplification is possible. */
11052 tree
11053 fold_builtin_expect (location_t loc, tree arg0, tree arg1, tree arg2,
11054 tree arg3)
11056 tree inner, fndecl, inner_arg0;
11057 enum tree_code code;
11059 /* Distribute the expected value over short-circuiting operators.
11060 See through the cast from truthvalue_type_node to long. */
11061 inner_arg0 = arg0;
11062 while (CONVERT_EXPR_P (inner_arg0)
11063 && INTEGRAL_TYPE_P (TREE_TYPE (inner_arg0))
11064 && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (inner_arg0, 0))))
11065 inner_arg0 = TREE_OPERAND (inner_arg0, 0);
11067 /* If this is a builtin_expect within a builtin_expect, keep the
11068 inner one. See through a comparison against a constant; it
11069 might have been added to create a truthvalue. */
11070 inner = inner_arg0;
11072 if (COMPARISON_CLASS_P (inner)
11073 && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST)
11074 inner = TREE_OPERAND (inner, 0);
11076 if (TREE_CODE (inner) == CALL_EXPR
11077 && (fndecl = get_callee_fndecl (inner))
11078 && (fndecl_built_in_p (fndecl, BUILT_IN_EXPECT)
11079 || fndecl_built_in_p (fndecl, BUILT_IN_EXPECT_WITH_PROBABILITY)))
11080 return arg0;
11082 inner = inner_arg0;
11083 code = TREE_CODE (inner);
11084 if (code == TRUTH_ANDIF_EXPR || code == TRUTH_ORIF_EXPR)
11086 tree op0 = TREE_OPERAND (inner, 0);
11087 tree op1 = TREE_OPERAND (inner, 1);
11088 arg1 = save_expr (arg1);
11090 op0 = build_builtin_expect_predicate (loc, op0, arg1, arg2, arg3);
11091 op1 = build_builtin_expect_predicate (loc, op1, arg1, arg2, arg3);
11092 inner = build2 (code, TREE_TYPE (inner), op0, op1);
11094 return fold_convert_loc (loc, TREE_TYPE (arg0), inner);
11097 /* If the argument isn't invariant then there's nothing else we can do. */
11098 if (!TREE_CONSTANT (inner_arg0))
11099 return NULL_TREE;
11101 /* If we expect that a comparison against the argument will fold to
11102 a constant, return the constant. In practice, this means a true
11103 constant or the address of a non-weak symbol. */
11104 inner = inner_arg0;
11105 STRIP_NOPS (inner);
11106 if (TREE_CODE (inner) == ADDR_EXPR)
11110 inner = TREE_OPERAND (inner, 0);
11112 while (TREE_CODE (inner) == COMPONENT_REF
11113 || TREE_CODE (inner) == ARRAY_REF);
11114 if (VAR_OR_FUNCTION_DECL_P (inner) && DECL_WEAK (inner))
11115 return NULL_TREE;
11118 /* Otherwise, ARG0 already has the proper type for the return value. */
11119 return arg0;
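/* Editorial sketch (not part of builtins.c): the TRUTH_ANDIF/ORIF case
   above distributes the expectation, so each short-circuit operand ends
   up carrying its own prediction.  */

static long
expect_distributes (int a, int b)
{
  /* Folded roughly as if written:
     __builtin_expect (a, 1) && __builtin_expect (b, 1).  */
  return __builtin_expect (a && b, 1);
}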
11122 /* Fold a call to __builtin_classify_type with argument ARG. */
11124 static tree
11125 fold_builtin_classify_type (tree arg)
11127 if (arg == 0)
11128 return build_int_cst (integer_type_node, no_type_class);
11130 return build_int_cst (integer_type_node, type_to_class (TREE_TYPE (arg)));
11133 /* Fold a call EXPR (which may be null) to __builtin_strlen with argument
11134 ARG. */
11136 static tree
11137 fold_builtin_strlen (location_t loc, tree expr, tree type, tree arg)
11139 if (!validate_arg (arg, POINTER_TYPE))
11140 return NULL_TREE;
11141 else
11143 c_strlen_data lendata = { };
11144 tree len = c_strlen (arg, 0, &lendata);
11146 if (len)
11147 return fold_convert_loc (loc, type, len);
11149 if (!lendata.decl)
11150 c_strlen (arg, 1, &lendata);
11152 if (lendata.decl)
11154 if (EXPR_HAS_LOCATION (arg))
11155 loc = EXPR_LOCATION (arg);
11156 else if (loc == UNKNOWN_LOCATION)
11157 loc = input_location;
11158 warn_string_no_nul (loc, expr, "strlen", arg, lendata.decl);
11161 return NULL_TREE;
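/* Editorial sketch (not part of builtins.c): with a constant string
   argument c_strlen succeeds and the call folds to an integer constant,
   so no library call is emitted.  */

static __SIZE_TYPE__
strlen_folded (void)
{
  return __builtin_strlen ("abc");   /* folds to 3 at compile time */
}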
11165 /* Fold a call to __builtin_inf or __builtin_huge_val. */
11167 static tree
11168 fold_builtin_inf (location_t loc, tree type, int warn)
11170 REAL_VALUE_TYPE real;
11172 /* __builtin_inff is intended to be usable to define INFINITY on all
11173 targets. If an infinity is not available, INFINITY expands "to a
11174 positive constant of type float that overflows at translation
11175 time", footnote "In this case, using INFINITY will violate the
11176 constraint in 6.4.4 and thus require a diagnostic." (C99 7.12#4).
11177 Thus we pedwarn to ensure this constraint violation is
11178 diagnosed. */
11179 if (!MODE_HAS_INFINITIES (TYPE_MODE (type)) && warn)
11180 pedwarn (loc, 0, "target format does not support infinity");
11182 real_inf (&real);
11183 return build_real (type, real);
11186 /* Fold function call to builtin sincos, sincosf, or sincosl. Return
11187 NULL_TREE if no simplification can be made. */
11189 static tree
11190 fold_builtin_sincos (location_t loc,
11191 tree arg0, tree arg1, tree arg2)
11193 tree type;
11194 tree fndecl, call = NULL_TREE;
11196 if (!validate_arg (arg0, REAL_TYPE)
11197 || !validate_arg (arg1, POINTER_TYPE)
11198 || !validate_arg (arg2, POINTER_TYPE))
11199 return NULL_TREE;
11201 type = TREE_TYPE (arg0);
11203 /* Canonicalize sincos to cexpi; look up the matching builtin. */
11204 built_in_function fn = mathfn_built_in_2 (type, CFN_BUILT_IN_CEXPI);
11205 if (fn == END_BUILTINS)
11206 return NULL_TREE;
11208 /* Calculate the result when the argument is a constant. */
11209 if (TREE_CODE (arg0) == REAL_CST)
11211 tree complex_type = build_complex_type (type);
11212 call = fold_const_call (as_combined_fn (fn), complex_type, arg0);
11214 if (!call)
11216 if (!targetm.libc_has_function (function_c99_math_complex, type)
11217 || !builtin_decl_implicit_p (fn))
11218 return NULL_TREE;
11219 fndecl = builtin_decl_explicit (fn);
11220 call = build_call_expr_loc (loc, fndecl, 1, arg0);
11221 call = builtin_save_expr (call);
11224 tree ptype = build_pointer_type (type);
11225 arg1 = fold_convert (ptype, arg1);
11226 arg2 = fold_convert (ptype, arg2);
11227 return build2 (COMPOUND_EXPR, void_type_node,
11228 build2 (MODIFY_EXPR, void_type_node,
11229 build_fold_indirect_ref_loc (loc, arg1),
11230 fold_build1_loc (loc, IMAGPART_EXPR, type, call)),
11231 build2 (MODIFY_EXPR, void_type_node,
11232 build_fold_indirect_ref_loc (loc, arg2),
11233 fold_build1_loc (loc, REALPART_EXPR, type, call)));
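/* Editorial sketch (GNU C, not part of builtins.c): the fold above
   rewrites sincos through cexpi, where __builtin_cexpi (x) is
   cos (x) + i*sin (x); ARG1 receives the imaginary part and ARG2 the
   real part.  */

static void
sincos_via_cexpi (double x, double *s, double *c)
{
  _Complex double t = __builtin_cexpi (x);
  *s = __imag__ t;   /* sin (x) */
  *c = __real__ t;   /* cos (x) */
}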
11236 /* Fold function call to builtin memcmp with arguments ARG1 and ARG2.
11237 Return NULL_TREE if no simplification can be made. */
11239 static tree
11240 fold_builtin_memcmp (location_t loc, tree arg1, tree arg2, tree len)
11242 if (!validate_arg (arg1, POINTER_TYPE)
11243 || !validate_arg (arg2, POINTER_TYPE)
11244 || !validate_arg (len, INTEGER_TYPE))
11245 return NULL_TREE;
11247 /* If the LEN parameter is zero, return zero. */
11248 if (integer_zerop (len))
11249 return omit_two_operands_loc (loc, integer_type_node, integer_zero_node,
11250 arg1, arg2);
11252 /* If ARG1 and ARG2 are the same (and not volatile), return zero. */
11253 if (operand_equal_p (arg1, arg2, 0))
11254 return omit_one_operand_loc (loc, integer_type_node, integer_zero_node, len);
11256 /* If the LEN parameter is one, return an expression corresponding to
11257 (*(const unsigned char*)arg1 - *(const unsigned char*)arg2). */
11258 if (tree_fits_uhwi_p (len) && tree_to_uhwi (len) == 1)
11260 tree cst_uchar_node = build_type_variant (unsigned_char_type_node, 1, 0);
11261 tree cst_uchar_ptr_node
11262 = build_pointer_type_for_mode (cst_uchar_node, ptr_mode, true);
11264 tree ind1
11265 = fold_convert_loc (loc, integer_type_node,
11266 build1 (INDIRECT_REF, cst_uchar_node,
11267 fold_convert_loc (loc,
11268 cst_uchar_ptr_node,
11269 arg1)));
11270 tree ind2
11271 = fold_convert_loc (loc, integer_type_node,
11272 build1 (INDIRECT_REF, cst_uchar_node,
11273 fold_convert_loc (loc,
11274 cst_uchar_ptr_node,
11275 arg2)));
11276 return fold_build2_loc (loc, MINUS_EXPR, integer_type_node, ind1, ind2);
11279 return NULL_TREE;
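/* Editorial sketch (not part of builtins.c): what the LEN == 1 case
   above produces -- a single byte load from each pointer and their
   difference, with no library call.  */

static int
memcmp1_folded (const void *a, const void *b)
{
  return *(const unsigned char *) a - *(const unsigned char *) b;
}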
11282 /* Fold a call to builtin isascii with argument ARG. */
11284 static tree
11285 fold_builtin_isascii (location_t loc, tree arg)
11287 if (!validate_arg (arg, INTEGER_TYPE))
11288 return NULL_TREE;
11289 else
11291 /* Transform isascii(c) -> ((c & ~0x7f) == 0). */
11292 arg = fold_build2 (BIT_AND_EXPR, integer_type_node, arg,
11293 build_int_cst (integer_type_node,
11294 ~ (unsigned HOST_WIDE_INT) 0x7f));
11295 return fold_build2_loc (loc, EQ_EXPR, integer_type_node,
11296 arg, integer_zero_node);
11300 /* Fold a call to builtin toascii with argument ARG. */
11302 static tree
11303 fold_builtin_toascii (location_t loc, tree arg)
11305 if (!validate_arg (arg, INTEGER_TYPE))
11306 return NULL_TREE;
11308 /* Transform toascii(c) -> (c & 0x7f). */
11309 return fold_build2_loc (loc, BIT_AND_EXPR, integer_type_node, arg,
11310 build_int_cst (integer_type_node, 0x7f));
11313 /* Fold a call to builtin isdigit with argument ARG. */
11315 static tree
11316 fold_builtin_isdigit (location_t loc, tree arg)
11318 if (!validate_arg (arg, INTEGER_TYPE))
11319 return NULL_TREE;
11320 else
11322 /* Transform isdigit(c) -> (unsigned)(c) - '0' <= 9. */
11323 /* According to the C standard, isdigit is unaffected by locale.
11324 However, it definitely is affected by the target character set. */
11325 unsigned HOST_WIDE_INT target_digit0
11326 = lang_hooks.to_target_charset ('0');
11328 if (target_digit0 == 0)
11329 return NULL_TREE;
11331 arg = fold_convert_loc (loc, unsigned_type_node, arg);
11332 arg = fold_build2 (MINUS_EXPR, unsigned_type_node, arg,
11333 build_int_cst (unsigned_type_node, target_digit0));
11334 return fold_build2_loc (loc, LE_EXPR, integer_type_node, arg,
11335 build_int_cst (unsigned_type_node, 9));
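/* Editorial sketch (not part of builtins.c), assuming a target whose
   digits are contiguous as in ASCII: one unsigned comparison replaces
   the pair c >= '0' && c <= '9', because for c < '0' the subtraction
   wraps to a huge unsigned value and the compare fails.  */

static int
isdigit_folded (int c)
{
  return (unsigned) c - '0' <= 9;
}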
11339 /* Fold a call to fabs, fabsf or fabsl with argument ARG. */
11341 static tree
11342 fold_builtin_fabs (location_t loc, tree arg, tree type)
11344 if (!validate_arg (arg, REAL_TYPE))
11345 return NULL_TREE;
11347 arg = fold_convert_loc (loc, type, arg);
11348 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11351 /* Fold a call to abs, labs, llabs or imaxabs with argument ARG. */
11353 static tree
11354 fold_builtin_abs (location_t loc, tree arg, tree type)
11356 if (!validate_arg (arg, INTEGER_TYPE))
11357 return NULL_TREE;
11359 arg = fold_convert_loc (loc, type, arg);
11360 return fold_build1_loc (loc, ABS_EXPR, type, arg);
11363 /* Fold a call to builtin carg(a+bi) -> atan2(b,a). */
11365 static tree
11366 fold_builtin_carg (location_t loc, tree arg, tree type)
11368 if (validate_arg (arg, COMPLEX_TYPE)
11369 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg))) == REAL_TYPE)
11371 tree atan2_fn = mathfn_built_in (type, BUILT_IN_ATAN2);
11373 if (atan2_fn)
11375 tree new_arg = builtin_save_expr (arg);
11376 tree r_arg = fold_build1_loc (loc, REALPART_EXPR, type, new_arg);
11377 tree i_arg = fold_build1_loc (loc, IMAGPART_EXPR, type, new_arg);
11378 return build_call_expr_loc (loc, atan2_fn, 2, i_arg, r_arg);
11382 return NULL_TREE;
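/* Editorial sketch (GNU C, not part of builtins.c): the user-level
   equivalence the fold above implements.  */

static double
carg_folded (_Complex double z)
{
  /* carg (z) == atan2 (cimag (z), creal (z)).  */
  return __builtin_atan2 (__imag__ z, __real__ z);
}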
11385 /* Fold a call to builtin frexp; we can assume the base is 2. */
11387 static tree
11388 fold_builtin_frexp (location_t loc, tree arg0, tree arg1, tree rettype)
11390 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11391 return NULL_TREE;
11393 STRIP_NOPS (arg0);
11395 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11396 return NULL_TREE;
11398 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11400 /* Proceed if a valid pointer type was passed in. */
11401 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == integer_type_node)
11403 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11404 tree frac, exp;
11406 switch (value->cl)
11408 case rvc_zero:
11409 /* For +-0, return (*exp = 0, +-0). */
11410 exp = integer_zero_node;
11411 frac = arg0;
11412 break;
11413 case rvc_nan:
11414 case rvc_inf:
11415 /* For +-NaN or +-Inf, *exp is unspecified, return arg0. */
11416 return omit_one_operand_loc (loc, rettype, arg0, arg1);
11417 case rvc_normal:
11419 /* Since the frexp function always expects base 2, and in
11420 GCC normalized significands are already in the range
11421 [0.5, 1.0), we have exactly what frexp wants. */
11422 REAL_VALUE_TYPE frac_rvt = *value;
11423 SET_REAL_EXP (&frac_rvt, 0);
11424 frac = build_real (rettype, frac_rvt);
11425 exp = build_int_cst (integer_type_node, REAL_EXP (value));
11427 break;
11428 default:
11429 gcc_unreachable ();
11432 /* Create the COMPOUND_EXPR (*arg1 = exp, frac). */
11433 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1, exp);
11434 TREE_SIDE_EFFECTS (arg1) = 1;
11435 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1, frac);
11438 return NULL_TREE;
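/* Editorial sketch (not part of builtins.c): a worked constant fold.
   8.0 == 0.5 * 2^4, and GCC keeps normalized significands in [0.5, 1.0),
   so the rvc_normal case above yields (*e = 4, return 0.5) with no call.  */

static double
frexp_folded (int *e)
{
  return __builtin_frexp (8.0, e);
}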
11441 /* Fold a call to builtin modf. */
11443 static tree
11444 fold_builtin_modf (location_t loc, tree arg0, tree arg1, tree rettype)
11446 if (! validate_arg (arg0, REAL_TYPE) || ! validate_arg (arg1, POINTER_TYPE))
11447 return NULL_TREE;
11449 STRIP_NOPS (arg0);
11451 if (!(TREE_CODE (arg0) == REAL_CST && ! TREE_OVERFLOW (arg0)))
11452 return NULL_TREE;
11454 arg1 = build_fold_indirect_ref_loc (loc, arg1);
11456 /* Proceed if a valid pointer type was passed in. */
11457 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg1)) == TYPE_MAIN_VARIANT (rettype))
11459 const REAL_VALUE_TYPE *const value = TREE_REAL_CST_PTR (arg0);
11460 REAL_VALUE_TYPE trunc, frac;
11462 switch (value->cl)
11464 case rvc_nan:
11465 case rvc_zero:
11466 /* For +-NaN or +-0, return (*arg1 = arg0, arg0). */
11467 trunc = frac = *value;
11468 break;
11469 case rvc_inf:
11470 /* For +-Inf, return (*arg1 = arg0, +-0). */
11471 frac = dconst0;
11472 frac.sign = value->sign;
11473 trunc = *value;
11474 break;
11475 case rvc_normal:
11476 /* Return (*arg1 = trunc(arg0), arg0-trunc(arg0)). */
11477 real_trunc (&trunc, VOIDmode, value);
11478 real_arithmetic (&frac, MINUS_EXPR, value, &trunc);
11479 /* If the original number was negative and already
11480 integral, then the fractional part is -0.0. */
11481 if (value->sign && frac.cl == rvc_zero)
11482 frac.sign = value->sign;
11483 break;
11486 /* Create the COMPOUND_EXPR (*arg1 = trunc, frac). */
11487 arg1 = fold_build2_loc (loc, MODIFY_EXPR, rettype, arg1,
11488 build_real (rettype, trunc));
11489 TREE_SIDE_EFFECTS (arg1) = 1;
11490 return fold_build2_loc (loc, COMPOUND_EXPR, rettype, arg1,
11491 build_real (rettype, frac));
11494 return NULL_TREE;
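/* Editorial sketch (not part of builtins.c): constant folds from the
   cases above.  modf (2.5, ip) becomes (*ip = 2.0, 0.5); a negative
   integral input such as -3.0 gives a -0.0 fraction per rvc_normal.  */

static double
modf_folded (double *ip)
{
  return __builtin_modf (2.5, ip);
}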
11497 /* Given a location LOC, an interclass builtin function decl FNDECL
11498 and its single argument ARG, return a folded expression computing
11499 the same, or NULL_TREE if we either couldn't or didn't want to fold
11500 (the latter happens if there's an RTL instruction available). */
11502 static tree
11503 fold_builtin_interclass_mathfn (location_t loc, tree fndecl, tree arg)
11505 machine_mode mode;
11507 if (!validate_arg (arg, REAL_TYPE))
11508 return NULL_TREE;
11510 if (interclass_mathfn_icode (arg, fndecl) != CODE_FOR_nothing)
11511 return NULL_TREE;
11513 mode = TYPE_MODE (TREE_TYPE (arg));
11515 bool is_ibm_extended = MODE_COMPOSITE_P (mode);
11517 /* If there is no optab, try generic code. */
11518 switch (DECL_FUNCTION_CODE (fndecl))
11520 tree result;
11522 CASE_FLT_FN (BUILT_IN_ISINF):
11524 /* isinf(x) -> isgreater(fabs(x),DBL_MAX). */
11525 tree const isgr_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11526 tree type = TREE_TYPE (arg);
11527 REAL_VALUE_TYPE r;
11528 char buf[128];
11530 if (is_ibm_extended)
11532 /* NaN and Inf are encoded in the high-order double value
11533 only. The low-order value is not significant. */
11534 type = double_type_node;
11535 mode = DFmode;
11536 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11538 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11539 real_from_string (&r, buf);
11540 result = build_call_expr (isgr_fn, 2,
11541 fold_build1_loc (loc, ABS_EXPR, type, arg),
11542 build_real (type, r));
11543 return result;
11545 CASE_FLT_FN (BUILT_IN_FINITE):
11546 case BUILT_IN_ISFINITE:
11548 /* isfinite(x) -> islessequal(fabs(x),DBL_MAX). */
11549 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11550 tree type = TREE_TYPE (arg);
11551 REAL_VALUE_TYPE r;
11552 char buf[128];
11554 if (is_ibm_extended)
11556 /* NaN and Inf are encoded in the high-order double value
11557 only. The low-order value is not significant. */
11558 type = double_type_node;
11559 mode = DFmode;
11560 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11562 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11563 real_from_string (&r, buf);
11564 result = build_call_expr (isle_fn, 2,
11565 fold_build1_loc (loc, ABS_EXPR, type, arg),
11566 build_real (type, r));
11567 /*result = fold_build2_loc (loc, UNGT_EXPR,
11568 TREE_TYPE (TREE_TYPE (fndecl)),
11569 fold_build1_loc (loc, ABS_EXPR, type, arg),
11570 build_real (type, r));
11571 result = fold_build1_loc (loc, TRUTH_NOT_EXPR,
11572 TREE_TYPE (TREE_TYPE (fndecl)),
11573 result);*/
11574 return result;
11576 case BUILT_IN_ISNORMAL:
11578 /* isnormal(x) -> isgreaterequal(fabs(x),DBL_MIN) &
11579 islessequal(fabs(x),DBL_MAX). */
11580 tree const isle_fn = builtin_decl_explicit (BUILT_IN_ISLESSEQUAL);
11581 tree type = TREE_TYPE (arg);
11582 tree orig_arg, max_exp, min_exp;
11583 machine_mode orig_mode = mode;
11584 REAL_VALUE_TYPE rmax, rmin;
11585 char buf[128];
11587 orig_arg = arg = builtin_save_expr (arg);
11588 if (is_ibm_extended)
11590 /* Use double to test the normal range of IBM extended
11591 precision. Emin for IBM extended precision is
11592 different to emin for IEEE double, being 53 higher
11593 since the low double exponent is at least 53 lower
11594 than the high double exponent. */
11595 type = double_type_node;
11596 mode = DFmode;
11597 arg = fold_build1_loc (loc, NOP_EXPR, type, arg);
11599 arg = fold_build1_loc (loc, ABS_EXPR, type, arg);
11601 get_max_float (REAL_MODE_FORMAT (mode), buf, sizeof (buf), false);
11602 real_from_string (&rmax, buf);
11603 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (orig_mode)->emin - 1);
11604 real_from_string (&rmin, buf);
11605 max_exp = build_real (type, rmax);
11606 min_exp = build_real (type, rmin);
11608 max_exp = build_call_expr (isle_fn, 2, arg, max_exp);
11609 if (is_ibm_extended)
11611 /* Testing the high end of the range is done just using
11612 the high double, using the same test as isfinite().
11613 For the subnormal end of the range we first test the
11614 high double, then if its magnitude is equal to the
11615 limit of 0x1p-969, we test whether the low double is
11616 non-zero and opposite sign to the high double. */
11617 tree const islt_fn = builtin_decl_explicit (BUILT_IN_ISLESS);
11618 tree const isgt_fn = builtin_decl_explicit (BUILT_IN_ISGREATER);
11619 tree gt_min = build_call_expr (isgt_fn, 2, arg, min_exp);
11620 tree eq_min = fold_build2 (EQ_EXPR, integer_type_node,
11621 arg, min_exp);
11622 tree as_complex = build1 (VIEW_CONVERT_EXPR,
11623 complex_double_type_node, orig_arg);
11624 tree hi_dbl = build1 (REALPART_EXPR, type, as_complex);
11625 tree lo_dbl = build1 (IMAGPART_EXPR, type, as_complex);
11626 tree zero = build_real (type, dconst0);
11627 tree hilt = build_call_expr (islt_fn, 2, hi_dbl, zero);
11628 tree lolt = build_call_expr (islt_fn, 2, lo_dbl, zero);
11629 tree logt = build_call_expr (isgt_fn, 2, lo_dbl, zero);
11630 tree ok_lo = fold_build1 (TRUTH_NOT_EXPR, integer_type_node,
11631 fold_build3 (COND_EXPR,
11632 integer_type_node,
11633 hilt, logt, lolt));
11634 eq_min = fold_build2 (TRUTH_ANDIF_EXPR, integer_type_node,
11635 eq_min, ok_lo);
11636 min_exp = fold_build2 (TRUTH_ORIF_EXPR, integer_type_node,
11637 gt_min, eq_min);
11639 else
11641 tree const isge_fn
11642 = builtin_decl_explicit (BUILT_IN_ISGREATEREQUAL);
11643 min_exp = build_call_expr (isge_fn, 2, arg, min_exp);
11645 result = fold_build2 (BIT_AND_EXPR, integer_type_node,
11646 max_exp, min_exp);
11647 return result;
11649 default:
11650 break;
11653 return NULL_TREE;
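/* Editorial sketch (not part of builtins.c; IEEE double assumed): the
   generic isinf expansion above at user level -- strictly greater than
   the largest finite value, via a quiet comparison.  */

static int
isinf_folded (double x)
{
  return __builtin_isgreater (__builtin_fabs (x), __DBL_MAX__);
}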
11656 /* Fold a call to __builtin_isnan, __builtin_isinf or __builtin_finite.
11657 ARG is the argument for the call. */
11659 static tree
11660 fold_builtin_classify (location_t loc, tree fndecl, tree arg, int builtin_index)
11662 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11664 if (!validate_arg (arg, REAL_TYPE))
11665 return NULL_TREE;
11667 switch (builtin_index)
11669 case BUILT_IN_ISINF:
11670 if (tree_expr_infinite_p (arg))
11671 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11672 if (!tree_expr_maybe_infinite_p (arg))
11673 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11674 return NULL_TREE;
11676 case BUILT_IN_ISINF_SIGN:
11678 /* isinf_sign(x) -> isinf(x) ? (signbit(x) ? -1 : 1) : 0 */
11679 /* In a boolean context, GCC will fold the inner COND_EXPR to
11680 1. So e.g. "if (isinf_sign(x))" would be folded to just
11681 "if (isinf(x) ? 1 : 0)" which becomes "if (isinf(x))". */
11682 tree signbit_fn = builtin_decl_explicit (BUILT_IN_SIGNBIT);
11683 tree isinf_fn = builtin_decl_explicit (BUILT_IN_ISINF);
11684 tree tmp = NULL_TREE;
11686 arg = builtin_save_expr (arg);
11688 if (signbit_fn && isinf_fn)
11690 tree signbit_call = build_call_expr_loc (loc, signbit_fn, 1, arg);
11691 tree isinf_call = build_call_expr_loc (loc, isinf_fn, 1, arg);
11693 signbit_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11694 signbit_call, integer_zero_node);
11695 isinf_call = fold_build2_loc (loc, NE_EXPR, integer_type_node,
11696 isinf_call, integer_zero_node);
11698 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node, signbit_call,
11699 integer_minus_one_node, integer_one_node);
11700 tmp = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11701 isinf_call, tmp,
11702 integer_zero_node);
11705 return tmp;
11708 case BUILT_IN_ISFINITE:
11709 if (tree_expr_finite_p (arg))
11710 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11711 if (tree_expr_nan_p (arg) || tree_expr_infinite_p (arg))
11712 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11713 return NULL_TREE;
11715 case BUILT_IN_ISNAN:
11716 if (tree_expr_nan_p (arg))
11717 return omit_one_operand_loc (loc, type, integer_one_node, arg);
11718 if (!tree_expr_maybe_nan_p (arg))
11719 return omit_one_operand_loc (loc, type, integer_zero_node, arg);
11722 bool is_ibm_extended = MODE_COMPOSITE_P (TYPE_MODE (TREE_TYPE (arg)));
11723 if (is_ibm_extended)
11725 /* NaN and Inf are encoded in the high-order double value
11726 only. The low-order value is not significant. */
11727 arg = fold_build1_loc (loc, NOP_EXPR, double_type_node, arg);
11730 arg = builtin_save_expr (arg);
11731 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg, arg);
11733 default:
11734 gcc_unreachable ();
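/* Editorial sketch (not part of builtins.c): the UNORDERED_EXPR built
   for isnan above -- only a NaN is unordered with itself, and the
   builtin form raises no exception on quiet NaNs.  */

static int
isnan_folded (double x)
{
  return __builtin_isunordered (x, x);
}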
11738 /* Fold a call to __builtin_fpclassify(int, int, int, int, int, ...).
11739 This builtin will generate code to return the appropriate floating
11740 point classification depending on the value of the floating point
11741 number passed in. The possible return values must be supplied as
11742 int arguments to the call in the following order: FP_NAN, FP_INFINITE,
11743 FP_NORMAL, FP_SUBNORMAL and FP_ZERO. The ellipsis is for exactly
11744 one floating-point argument, which is "type generic". */
11746 static tree
11747 fold_builtin_fpclassify (location_t loc, tree *args, int nargs)
11749 tree fp_nan, fp_infinite, fp_normal, fp_subnormal, fp_zero,
11750 arg, type, res, tmp;
11751 machine_mode mode;
11752 REAL_VALUE_TYPE r;
11753 char buf[128];
11755 /* Verify the required arguments in the original call. */
11756 if (nargs != 6
11757 || !validate_arg (args[0], INTEGER_TYPE)
11758 || !validate_arg (args[1], INTEGER_TYPE)
11759 || !validate_arg (args[2], INTEGER_TYPE)
11760 || !validate_arg (args[3], INTEGER_TYPE)
11761 || !validate_arg (args[4], INTEGER_TYPE)
11762 || !validate_arg (args[5], REAL_TYPE))
11763 return NULL_TREE;
11765 fp_nan = args[0];
11766 fp_infinite = args[1];
11767 fp_normal = args[2];
11768 fp_subnormal = args[3];
11769 fp_zero = args[4];
11770 arg = args[5];
11771 type = TREE_TYPE (arg);
11772 mode = TYPE_MODE (type);
11773 arg = builtin_save_expr (fold_build1_loc (loc, ABS_EXPR, type, arg));
11775 /* fpclassify(x) ->
11776 isnan(x) ? FP_NAN :
11777 (fabs(x) == Inf ? FP_INFINITE :
11778 (fabs(x) >= DBL_MIN ? FP_NORMAL :
11779 (x == 0 ? FP_ZERO : FP_SUBNORMAL))). */
11781 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11782 build_real (type, dconst0));
11783 res = fold_build3_loc (loc, COND_EXPR, integer_type_node,
11784 tmp, fp_zero, fp_subnormal);
11786 sprintf (buf, "0x1p%d", REAL_MODE_FORMAT (mode)->emin - 1);
11787 real_from_string (&r, buf);
11788 tmp = fold_build2_loc (loc, GE_EXPR, integer_type_node,
11789 arg, build_real (type, r));
11790 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, fp_normal, res);
11792 if (tree_expr_maybe_infinite_p (arg))
11794 real_inf (&r);
11795 tmp = fold_build2_loc (loc, EQ_EXPR, integer_type_node, arg,
11796 build_real (type, r));
11797 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp,
11798 fp_infinite, res);
11801 if (tree_expr_maybe_nan_p (arg))
11803 tmp = fold_build2_loc (loc, ORDERED_EXPR, integer_type_node, arg, arg);
11804 res = fold_build3_loc (loc, COND_EXPR, integer_type_node, tmp, res, fp_nan);
11807 return res;
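/* Editorial sketch (not part of builtins.c; IEEE double assumed): the
   same decision cascade the fold builds, evaluated on fabs(x) from NaN
   outward; __DBL_MIN__ is the smallest normal double.  */

static int
fpclassify_folded (int fp_nan, int fp_infinite, int fp_normal,
		   int fp_subnormal, int fp_zero, double x)
{
  double ax = __builtin_fabs (x);
  return __builtin_isunordered (x, x) ? fp_nan
	 : ax == __builtin_inf () ? fp_infinite
	 : ax >= __DBL_MIN__ ? fp_normal
	 : ax == 0.0 ? fp_zero
	 : fp_subnormal;
}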
11810 /* Fold a call to an unordered comparison function such as
11811 __builtin_isgreater(). FNDECL is the FUNCTION_DECL for the function
11812 being called and ARG0 and ARG1 are the arguments for the call.
11813 UNORDERED_CODE and ORDERED_CODE are comparison codes that give
11814 the opposite of the desired result. UNORDERED_CODE is used
11815 for modes that can hold NaNs and ORDERED_CODE is used for
11816 the rest. */
11818 static tree
11819 fold_builtin_unordered_cmp (location_t loc, tree fndecl, tree arg0, tree arg1,
11820 enum tree_code unordered_code,
11821 enum tree_code ordered_code)
11823 tree type = TREE_TYPE (TREE_TYPE (fndecl));
11824 enum tree_code code;
11825 tree type0, type1;
11826 enum tree_code code0, code1;
11827 tree cmp_type = NULL_TREE;
11829 type0 = TREE_TYPE (arg0);
11830 type1 = TREE_TYPE (arg1);
11832 code0 = TREE_CODE (type0);
11833 code1 = TREE_CODE (type1);
11835 if (code0 == REAL_TYPE && code1 == REAL_TYPE)
11836 /* Choose the wider of two real types. */
11837 cmp_type = TYPE_PRECISION (type0) >= TYPE_PRECISION (type1)
11838 ? type0 : type1;
11839 else if (code0 == REAL_TYPE && code1 == INTEGER_TYPE)
11840 cmp_type = type0;
11841 else if (code0 == INTEGER_TYPE && code1 == REAL_TYPE)
11842 cmp_type = type1;
11844 arg0 = fold_convert_loc (loc, cmp_type, arg0);
11845 arg1 = fold_convert_loc (loc, cmp_type, arg1);
11847 if (unordered_code == UNORDERED_EXPR)
11849 if (tree_expr_nan_p (arg0) || tree_expr_nan_p (arg1))
11850 return omit_two_operands_loc (loc, type, integer_one_node, arg0, arg1);
11851 if (!tree_expr_maybe_nan_p (arg0) && !tree_expr_maybe_nan_p (arg1))
11852 return omit_two_operands_loc (loc, type, integer_zero_node, arg0, arg1);
11853 return fold_build2_loc (loc, UNORDERED_EXPR, type, arg0, arg1);
11856 code = (tree_expr_maybe_nan_p (arg0) || tree_expr_maybe_nan_p (arg1))
11857 ? unordered_code : ordered_code;
11858 return fold_build1_loc (loc, TRUTH_NOT_EXPR, type,
11859 fold_build2_loc (loc, code, type, arg0, arg1));
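/* Editorial sketch (not part of builtins.c): for example, isgreater
   folds to !(x UNLE y), where UNLE is "unordered or less-or-equal"; the
   negation means "ordered and strictly greater", computed without
   raising exceptions on quiet NaNs.  */

static int
isgreater_folded (double x, double y)
{
  return !(__builtin_isunordered (x, y) || __builtin_islessequal (x, y));
}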
11862 /* Fold __builtin_{,s,u}{add,sub,mul}{,l,ll}_overflow, either into normal
11863 arithmetic if it can never overflow, or into internal functions that
11864 return both the result of the arithmetic and an overflow boolean flag
11865 in a complex integer result, or some other check for overflow.
11866 Similarly fold __builtin_{add,sub,mul}_overflow_p to just the overflow
11867 checking part of that. */
11869 static tree
11870 fold_builtin_arith_overflow (location_t loc, enum built_in_function fcode,
11871 tree arg0, tree arg1, tree arg2)
11873 enum internal_fn ifn = IFN_LAST;
11874 /* The code of the expression corresponding to the built-in. */
11875 enum tree_code opcode = ERROR_MARK;
11876 bool ovf_only = false;
11878 switch (fcode)
11880 case BUILT_IN_ADD_OVERFLOW_P:
11881 ovf_only = true;
11882 /* FALLTHRU */
11883 case BUILT_IN_ADD_OVERFLOW:
11884 case BUILT_IN_SADD_OVERFLOW:
11885 case BUILT_IN_SADDL_OVERFLOW:
11886 case BUILT_IN_SADDLL_OVERFLOW:
11887 case BUILT_IN_UADD_OVERFLOW:
11888 case BUILT_IN_UADDL_OVERFLOW:
11889 case BUILT_IN_UADDLL_OVERFLOW:
11890 opcode = PLUS_EXPR;
11891 ifn = IFN_ADD_OVERFLOW;
11892 break;
11893 case BUILT_IN_SUB_OVERFLOW_P:
11894 ovf_only = true;
11895 /* FALLTHRU */
11896 case BUILT_IN_SUB_OVERFLOW:
11897 case BUILT_IN_SSUB_OVERFLOW:
11898 case BUILT_IN_SSUBL_OVERFLOW:
11899 case BUILT_IN_SSUBLL_OVERFLOW:
11900 case BUILT_IN_USUB_OVERFLOW:
11901 case BUILT_IN_USUBL_OVERFLOW:
11902 case BUILT_IN_USUBLL_OVERFLOW:
11903 opcode = MINUS_EXPR;
11904 ifn = IFN_SUB_OVERFLOW;
11905 break;
11906 case BUILT_IN_MUL_OVERFLOW_P:
11907 ovf_only = true;
11908 /* FALLTHRU */
11909 case BUILT_IN_MUL_OVERFLOW:
11910 case BUILT_IN_SMUL_OVERFLOW:
11911 case BUILT_IN_SMULL_OVERFLOW:
11912 case BUILT_IN_SMULLL_OVERFLOW:
11913 case BUILT_IN_UMUL_OVERFLOW:
11914 case BUILT_IN_UMULL_OVERFLOW:
11915 case BUILT_IN_UMULLL_OVERFLOW:
11916 opcode = MULT_EXPR;
11917 ifn = IFN_MUL_OVERFLOW;
11918 break;
11919 default:
11920 gcc_unreachable ();
11923 /* For the "generic" overloads, the first two arguments can have different
11924 types and the last argument determines the target type to use to check
11925 for overflow. The arguments of the other overloads all have the same
11926 type. */
11927 tree type = ovf_only ? TREE_TYPE (arg2) : TREE_TYPE (TREE_TYPE (arg2));
11929 /* For the __builtin_{add,sub,mul}_overflow_p builtins, when the first two
11930 arguments are constant, attempt to fold the built-in call into a constant
11931 expression indicating whether or not it detected an overflow. */
11932 if (ovf_only
11933 && TREE_CODE (arg0) == INTEGER_CST
11934 && TREE_CODE (arg1) == INTEGER_CST)
11935 /* Perform the computation in the target type and check for overflow. */
11936 return omit_one_operand_loc (loc, boolean_type_node,
11937 arith_overflowed_p (opcode, type, arg0, arg1)
11938 ? boolean_true_node : boolean_false_node,
11939 arg2);
11941 tree intres, ovfres;
11942 if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
11944 intres = fold_binary_loc (loc, opcode, type,
11945 fold_convert_loc (loc, type, arg0),
11946 fold_convert_loc (loc, type, arg1));
11947 if (TREE_OVERFLOW (intres))
11948 intres = drop_tree_overflow (intres);
11949 ovfres = (arith_overflowed_p (opcode, type, arg0, arg1)
11950 ? boolean_true_node : boolean_false_node);
11952 else
11954 tree ctype = build_complex_type (type);
11955 tree call = build_call_expr_internal_loc (loc, ifn, ctype, 2,
11956 arg0, arg1);
11957 tree tgt = save_expr (call);
11958 intres = build1_loc (loc, REALPART_EXPR, type, tgt);
11959 ovfres = build1_loc (loc, IMAGPART_EXPR, type, tgt);
11960 ovfres = fold_convert_loc (loc, boolean_type_node, ovfres);
11963 if (ovf_only)
11964 return omit_one_operand_loc (loc, boolean_type_node, ovfres, arg2);
11966 tree mem_arg2 = build_fold_indirect_ref_loc (loc, arg2);
11967 tree store
11968 = fold_build2_loc (loc, MODIFY_EXPR, void_type_node, mem_arg2, intres);
11969 return build2_loc (loc, COMPOUND_EXPR, boolean_type_node, store, ovfres);
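/* Editorial sketch (not part of builtins.c): the two outcomes described
   above.  With constant operands the _p form folds to a compile-time
   boolean; otherwise the call becomes an internal function returning a
   complex pair (REALPART = result, IMAGPART = overflow flag).  */

static int
add_overflow_folds (int a, int b, int *res)
{
  int known = __builtin_add_overflow_p (__INT_MAX__, 1, (int) 0); /* 1 */
  return known + __builtin_add_overflow (a, b, res);
}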
11972 /* Fold a call to __builtin_FILE to a constant string. */
11974 static inline tree
11975 fold_builtin_FILE (location_t loc)
11977 if (const char *fname = LOCATION_FILE (loc))
11979 /* The documentation says this builtin is equivalent to the preprocessor
11980 __FILE__ macro so it appears appropriate to use the same file prefix
11981 mappings. */
11982 fname = remap_macro_filename (fname);
11983 return build_string_literal (strlen (fname) + 1, fname);
11986 return build_string_literal (1, "");
11989 /* Fold a call to __builtin_FUNCTION to a constant string. */
11991 static inline tree
11992 fold_builtin_FUNCTION ()
11994 const char *name = "";
11996 if (current_function_decl)
11997 name = lang_hooks.decl_printable_name (current_function_decl, 0);
11999 return build_string_literal (strlen (name) + 1, name);
12002 /* Fold a call to __builtin_LINE to an integer constant. */
12004 static inline tree
12005 fold_builtin_LINE (location_t loc, tree type)
12007 return build_int_cst (type, LOCATION_LINE (loc));
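/* Editorial sketch (not part of builtins.c): all three builtins fold to
   constants at the call site; __builtin_FILE honours the same prefix
   mapping as the __FILE__ macro.  */

static void
where_am_i (int *line, const char **file, const char **func)
{
  *line = __builtin_LINE ();       /* integer constant         */
  *file = __builtin_FILE ();       /* remapped like __FILE__   */
  *func = __builtin_FUNCTION ();   /* "" outside any function  */
}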
12010 /* Fold a call to built-in function FNDECL with 0 arguments.
12011 This function returns NULL_TREE if no simplification was possible. */
12013 static tree
12014 fold_builtin_0 (location_t loc, tree fndecl)
12016 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12017 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12018 switch (fcode)
12020 case BUILT_IN_FILE:
12021 return fold_builtin_FILE (loc);
12023 case BUILT_IN_FUNCTION:
12024 return fold_builtin_FUNCTION ();
12026 case BUILT_IN_LINE:
12027 return fold_builtin_LINE (loc, type);
12029 CASE_FLT_FN (BUILT_IN_INF):
12030 CASE_FLT_FN_FLOATN_NX (BUILT_IN_INF):
12031 case BUILT_IN_INFD32:
12032 case BUILT_IN_INFD64:
12033 case BUILT_IN_INFD128:
12034 return fold_builtin_inf (loc, type, true);
12036 CASE_FLT_FN (BUILT_IN_HUGE_VAL):
12037 CASE_FLT_FN_FLOATN_NX (BUILT_IN_HUGE_VAL):
12038 return fold_builtin_inf (loc, type, false);
12040 case BUILT_IN_CLASSIFY_TYPE:
12041 return fold_builtin_classify_type (NULL_TREE);
12043 default:
12044 break;
12046 return NULL_TREE;
12049 /* Fold a call to built-in function FNDECL with 1 argument, ARG0.
12050 This function returns NULL_TREE if no simplification was possible. */
12052 static tree
12053 fold_builtin_1 (location_t loc, tree expr, tree fndecl, tree arg0)
12055 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12056 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12058 if (TREE_CODE (arg0) == ERROR_MARK)
12059 return NULL_TREE;
12061 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0))
12062 return ret;
12064 switch (fcode)
12066 case BUILT_IN_CONSTANT_P:
12068 tree val = fold_builtin_constant_p (arg0);
12070 /* Gimplification will pull the CALL_EXPR for the builtin out of
12071 an if condition. When not optimizing, we'll not CSE it back.
12072 To avoid link-error regressions, return false now.
12073 if (!val && !optimize)
12074 val = integer_zero_node;
12076 return val;
12079 case BUILT_IN_CLASSIFY_TYPE:
12080 return fold_builtin_classify_type (arg0);
12082 case BUILT_IN_STRLEN:
12083 return fold_builtin_strlen (loc, expr, type, arg0);
12085 CASE_FLT_FN (BUILT_IN_FABS):
12086 CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
12087 case BUILT_IN_FABSD32:
12088 case BUILT_IN_FABSD64:
12089 case BUILT_IN_FABSD128:
12090 return fold_builtin_fabs (loc, arg0, type);
12092 case BUILT_IN_ABS:
12093 case BUILT_IN_LABS:
12094 case BUILT_IN_LLABS:
12095 case BUILT_IN_IMAXABS:
12096 return fold_builtin_abs (loc, arg0, type);
12098 CASE_FLT_FN (BUILT_IN_CONJ):
12099 if (validate_arg (arg0, COMPLEX_TYPE)
12100 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
12101 return fold_build1_loc (loc, CONJ_EXPR, type, arg0);
12102 break;
12104 CASE_FLT_FN (BUILT_IN_CREAL):
12105 if (validate_arg (arg0, COMPLEX_TYPE)
12106 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
12107 return non_lvalue_loc (loc, fold_build1_loc (loc, REALPART_EXPR, type, arg0));
12108 break;
12110 CASE_FLT_FN (BUILT_IN_CIMAG):
12111 if (validate_arg (arg0, COMPLEX_TYPE)
12112 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE)
12113 return non_lvalue_loc (loc, fold_build1_loc (loc, IMAGPART_EXPR, type, arg0));
12114 break;
12116 CASE_FLT_FN (BUILT_IN_CARG):
12117 return fold_builtin_carg (loc, arg0, type);
12119 case BUILT_IN_ISASCII:
12120 return fold_builtin_isascii (loc, arg0);
12122 case BUILT_IN_TOASCII:
12123 return fold_builtin_toascii (loc, arg0);
12125 case BUILT_IN_ISDIGIT:
12126 return fold_builtin_isdigit (loc, arg0);
12128 CASE_FLT_FN (BUILT_IN_FINITE):
12129 case BUILT_IN_FINITED32:
12130 case BUILT_IN_FINITED64:
12131 case BUILT_IN_FINITED128:
12132 case BUILT_IN_ISFINITE:
12134 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISFINITE);
12135 if (ret)
12136 return ret;
12137 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
12140 CASE_FLT_FN (BUILT_IN_ISINF):
12141 case BUILT_IN_ISINFD32:
12142 case BUILT_IN_ISINFD64:
12143 case BUILT_IN_ISINFD128:
12145 tree ret = fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF);
12146 if (ret)
12147 return ret;
12148 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
12151 case BUILT_IN_ISNORMAL:
12152 return fold_builtin_interclass_mathfn (loc, fndecl, arg0);
12154 case BUILT_IN_ISINF_SIGN:
12155 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISINF_SIGN);
12157 CASE_FLT_FN (BUILT_IN_ISNAN):
12158 case BUILT_IN_ISNAND32:
12159 case BUILT_IN_ISNAND64:
12160 case BUILT_IN_ISNAND128:
12161 return fold_builtin_classify (loc, fndecl, arg0, BUILT_IN_ISNAN);
12163 case BUILT_IN_FREE:
12164 if (integer_zerop (arg0))
12165 return build_empty_stmt (loc);
12166 break;
12168 default:
12169 break;
12172 return NULL_TREE;
12176 /* Folds a call EXPR (which may be null) to built-in function FNDECL
12177 with 2 arguments, ARG0 and ARG1. This function returns NULL_TREE
12178 if no simplification was possible. */
12180 static tree
12181 fold_builtin_2 (location_t loc, tree expr, tree fndecl, tree arg0, tree arg1)
12183 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12184 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12186 if (TREE_CODE (arg0) == ERROR_MARK
12187 || TREE_CODE (arg1) == ERROR_MARK)
12188 return NULL_TREE;
12190 if (tree ret = fold_const_call (as_combined_fn (fcode), type, arg0, arg1))
12191 return ret;
12193 switch (fcode)
12195 CASE_FLT_FN_REENT (BUILT_IN_GAMMA): /* GAMMA_R */
12196 CASE_FLT_FN_REENT (BUILT_IN_LGAMMA): /* LGAMMA_R */
12197 if (validate_arg (arg0, REAL_TYPE)
12198 && validate_arg (arg1, POINTER_TYPE))
12199 return do_mpfr_lgamma_r (arg0, arg1, type);
12200 break;
12202 CASE_FLT_FN (BUILT_IN_FREXP):
12203 return fold_builtin_frexp (loc, arg0, arg1, type);
12205 CASE_FLT_FN (BUILT_IN_MODF):
12206 return fold_builtin_modf (loc, arg0, arg1, type);
12208 case BUILT_IN_STRSPN:
12209 return fold_builtin_strspn (loc, expr, arg0, arg1);
12211 case BUILT_IN_STRCSPN:
12212 return fold_builtin_strcspn (loc, expr, arg0, arg1);
12214 case BUILT_IN_STRPBRK:
12215 return fold_builtin_strpbrk (loc, expr, arg0, arg1, type);
12217 case BUILT_IN_EXPECT:
12218 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, NULL_TREE);
12220 case BUILT_IN_ISGREATER:
12221 return fold_builtin_unordered_cmp (loc, fndecl,
12222 arg0, arg1, UNLE_EXPR, LE_EXPR);
12223 case BUILT_IN_ISGREATEREQUAL:
12224 return fold_builtin_unordered_cmp (loc, fndecl,
12225 arg0, arg1, UNLT_EXPR, LT_EXPR);
12226 case BUILT_IN_ISLESS:
12227 return fold_builtin_unordered_cmp (loc, fndecl,
12228 arg0, arg1, UNGE_EXPR, GE_EXPR);
12229 case BUILT_IN_ISLESSEQUAL:
12230 return fold_builtin_unordered_cmp (loc, fndecl,
12231 arg0, arg1, UNGT_EXPR, GT_EXPR);
12232 case BUILT_IN_ISLESSGREATER:
12233 return fold_builtin_unordered_cmp (loc, fndecl,
12234 arg0, arg1, UNEQ_EXPR, EQ_EXPR);
12235 case BUILT_IN_ISUNORDERED:
12236 return fold_builtin_unordered_cmp (loc, fndecl,
12237 arg0, arg1, UNORDERED_EXPR,
12238 NOP_EXPR);
12240 /* We do the folding for va_start in the expander. */
12241 case BUILT_IN_VA_START:
12242 break;
12244 case BUILT_IN_OBJECT_SIZE:
12245 return fold_builtin_object_size (arg0, arg1);
12247 case BUILT_IN_ATOMIC_ALWAYS_LOCK_FREE:
12248 return fold_builtin_atomic_always_lock_free (arg0, arg1);
12250 case BUILT_IN_ATOMIC_IS_LOCK_FREE:
12251 return fold_builtin_atomic_is_lock_free (arg0, arg1);
12253 default:
12254 break;
12256 return NULL_TREE;
12259 /* Fold a call to built-in function FNDECL with 3 arguments, ARG0, ARG1,
12260 and ARG2.
12261 This function returns NULL_TREE if no simplification was possible. */
12263 static tree
12264 fold_builtin_3 (location_t loc, tree fndecl,
12265 tree arg0, tree arg1, tree arg2)
12267 tree type = TREE_TYPE (TREE_TYPE (fndecl));
12268 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
12270 if (TREE_CODE (arg0) == ERROR_MARK
12271 || TREE_CODE (arg1) == ERROR_MARK
12272 || TREE_CODE (arg2) == ERROR_MARK)
12273 return NULL_TREE;
12275 if (tree ret = fold_const_call (as_combined_fn (fcode), type,
12276 arg0, arg1, arg2))
12277 return ret;
12279 switch (fcode)
12282 CASE_FLT_FN (BUILT_IN_SINCOS):
12283 return fold_builtin_sincos (loc, arg0, arg1, arg2);
12285 CASE_FLT_FN (BUILT_IN_REMQUO):
12286 if (validate_arg (arg0, REAL_TYPE)
12287 && validate_arg (arg1, REAL_TYPE)
12288 && validate_arg (arg2, POINTER_TYPE))
12289 return do_mpfr_remquo (arg0, arg1, arg2);
12290 break;
12292 case BUILT_IN_MEMCMP:
12293 return fold_builtin_memcmp (loc, arg0, arg1, arg2);
12295 case BUILT_IN_EXPECT:
12296 return fold_builtin_expect (loc, arg0, arg1, arg2, NULL_TREE);
12298 case BUILT_IN_EXPECT_WITH_PROBABILITY:
12299 return fold_builtin_expect (loc, arg0, arg1, NULL_TREE, arg2);
12301 case BUILT_IN_ADD_OVERFLOW:
12302 case BUILT_IN_SUB_OVERFLOW:
12303 case BUILT_IN_MUL_OVERFLOW:
12304 case BUILT_IN_ADD_OVERFLOW_P:
12305 case BUILT_IN_SUB_OVERFLOW_P:
12306 case BUILT_IN_MUL_OVERFLOW_P:
12307 case BUILT_IN_SADD_OVERFLOW:
12308 case BUILT_IN_SADDL_OVERFLOW:
12309 case BUILT_IN_SADDLL_OVERFLOW:
12310 case BUILT_IN_SSUB_OVERFLOW:
12311 case BUILT_IN_SSUBL_OVERFLOW:
12312 case BUILT_IN_SSUBLL_OVERFLOW:
12313 case BUILT_IN_SMUL_OVERFLOW:
12314 case BUILT_IN_SMULL_OVERFLOW:
12315 case BUILT_IN_SMULLL_OVERFLOW:
12316 case BUILT_IN_UADD_OVERFLOW:
12317 case BUILT_IN_UADDL_OVERFLOW:
12318 case BUILT_IN_UADDLL_OVERFLOW:
12319 case BUILT_IN_USUB_OVERFLOW:
12320 case BUILT_IN_USUBL_OVERFLOW:
12321 case BUILT_IN_USUBLL_OVERFLOW:
12322 case BUILT_IN_UMUL_OVERFLOW:
12323 case BUILT_IN_UMULL_OVERFLOW:
12324 case BUILT_IN_UMULLL_OVERFLOW:
12325 return fold_builtin_arith_overflow (loc, fcode, arg0, arg1, arg2);
12327 default:
12328 break;
12330 return NULL_TREE;
12333 /* Folds a call EXPR (which may be null) to built-in function FNDECL.
12334 ARGS is an array of NARGS arguments. IGNORE is true if the result
12335 of the function call is ignored. This function returns NULL_TREE
12336 if no simplification was possible. */
12338 static tree
12339 fold_builtin_n (location_t loc, tree expr, tree fndecl, tree *args,
12340 int nargs, bool)
12342 tree ret = NULL_TREE;
12344 switch (nargs)
12346 case 0:
12347 ret = fold_builtin_0 (loc, fndecl);
12348 break;
12349 case 1:
12350 ret = fold_builtin_1 (loc, expr, fndecl, args[0]);
12351 break;
12352 case 2:
12353 ret = fold_builtin_2 (loc, expr, fndecl, args[0], args[1]);
12354 break;
12355 case 3:
12356 ret = fold_builtin_3 (loc, fndecl, args[0], args[1], args[2]);
12357 break;
12358 default:
12359 ret = fold_builtin_varargs (loc, fndecl, args, nargs);
12360 break;
12362 if (ret)
12364 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
12365 SET_EXPR_LOCATION (ret, loc);
12366 return ret;
12368 return NULL_TREE;
12371 /* Construct a new CALL_EXPR to FNDECL using the tail of the argument
12372 list ARGS along with N new arguments in NEWARGS. SKIP is the number
12373 of arguments in ARGS to be omitted. OLDNARGS is the number of
12374 elements in ARGS. */
12376 static tree
12377 rewrite_call_expr_valist (location_t loc, int oldnargs, tree *args,
12378 int skip, tree fndecl, int n, va_list newargs)
12380 int nargs = oldnargs - skip + n;
12381 tree *buffer;
12383 if (n > 0)
12385 int i, j;
12387 buffer = XALLOCAVEC (tree, nargs);
12388 for (i = 0; i < n; i++)
12389 buffer[i] = va_arg (newargs, tree);
12390 for (j = skip; j < oldnargs; j++, i++)
12391 buffer[i] = args[j];
12393 else
12394 buffer = args + skip;
12396 return build_call_expr_loc_array (loc, fndecl, nargs, buffer);
12399 /* Return true if FNDECL shouldn't be folded right now.
12400 If a built-in function has an inline attribute always_inline
12401 wrapper, defer folding it until after always_inline functions have
12402 been inlined, otherwise e.g. -D_FORTIFY_SOURCE checking
12403 might not be performed. */
12405 bool
12406 avoid_folding_inline_builtin (tree fndecl)
12408 return (DECL_DECLARED_INLINE_P (fndecl)
12409 && DECL_DISREGARD_INLINE_LIMITS (fndecl)
12410 && cfun
12411 && !cfun->always_inline_functions_inlined
12412 && lookup_attribute ("always_inline", DECL_ATTRIBUTES (fndecl)));
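/* Editorial sketch (not part of builtins.c), modelled loosely on glibc's
   _FORTIFY_SOURCE headers; the wrapper below is illustrative, not a real
   header.  If the memcpy call were folded before this always_inline
   wrapper is inlined, the object-size check would never be emitted --
   exactly what the predicate above defers.  */

extern inline __attribute__ ((always_inline, gnu_inline)) void *
memcpy (void *dest, const void *src, __SIZE_TYPE__ n)
{
  return __builtin___memcpy_chk (dest, src, n,
				 __builtin_object_size (dest, 0));
}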
12415 /* A wrapper function for builtin folding that prevents warnings for
12416 "statement without effect" and the like, caused by removing the
12417 call node earlier than the warning is generated. */
12419 tree
12420 fold_call_expr (location_t loc, tree exp, bool ignore)
12422 tree ret = NULL_TREE;
12423 tree fndecl = get_callee_fndecl (exp);
12424 if (fndecl && fndecl_built_in_p (fndecl)
12425 /* If CALL_EXPR_VA_ARG_PACK is set, the arguments aren't finalized
12426 yet. Defer folding until we see all the arguments
12427 (after inlining). */
12428 && !CALL_EXPR_VA_ARG_PACK (exp))
12430 int nargs = call_expr_nargs (exp);
12432 /* Before gimplification CALL_EXPR_VA_ARG_PACK is not set, but
12433 instead the last argument is __builtin_va_arg_pack (). Defer folding
12434 even in that case, until arguments are finalized. */
12435 if (nargs && TREE_CODE (CALL_EXPR_ARG (exp, nargs - 1)) == CALL_EXPR)
12437 tree fndecl2 = get_callee_fndecl (CALL_EXPR_ARG (exp, nargs - 1));
12438 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12439 return NULL_TREE;
12442 if (avoid_folding_inline_builtin (fndecl))
12443 return NULL_TREE;
12445 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12446 return targetm.fold_builtin (fndecl, call_expr_nargs (exp),
12447 CALL_EXPR_ARGP (exp), ignore);
12448 else
12450 tree *args = CALL_EXPR_ARGP (exp);
12451 ret = fold_builtin_n (loc, exp, fndecl, args, nargs, ignore);
12452 if (ret)
12453 return ret;
12456 return NULL_TREE;
12459 /* Fold a CALL_EXPR with type TYPE with FN as the function expression.
12460 N arguments are passed in the array ARGARRAY. Return a folded
12461 expression or NULL_TREE if no simplification was possible. */
12463 tree
12464 fold_builtin_call_array (location_t loc, tree,
12465 tree fn,
12466 int n,
12467 tree *argarray)
12469 if (TREE_CODE (fn) != ADDR_EXPR)
12470 return NULL_TREE;
12472 tree fndecl = TREE_OPERAND (fn, 0);
12473 if (TREE_CODE (fndecl) == FUNCTION_DECL
12474 && fndecl_built_in_p (fndecl))
12476 /* If last argument is __builtin_va_arg_pack (), arguments to this
12477 function are not finalized yet. Defer folding until they are. */
12478 if (n && TREE_CODE (argarray[n - 1]) == CALL_EXPR)
12480 tree fndecl2 = get_callee_fndecl (argarray[n - 1]);
12481 if (fndecl2 && fndecl_built_in_p (fndecl2, BUILT_IN_VA_ARG_PACK))
12482 return NULL_TREE;
12484 if (avoid_folding_inline_builtin (fndecl))
12485 return NULL_TREE;
12486 if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
12487 return targetm.fold_builtin (fndecl, n, argarray, false);
12488 else
12489 return fold_builtin_n (loc, NULL_TREE, fndecl, argarray, n, false);
12492 return NULL_TREE;
12495 /* Construct a new CALL_EXPR using the tail of the argument list of EXP
12496 along with N new arguments specified as the "..." parameters. SKIP
12497 is the number of arguments in EXP to be omitted. This function is used
12498 to do varargs-to-varargs transformations. */
12500 static tree
12501 rewrite_call_expr (location_t loc, tree exp, int skip, tree fndecl, int n, ...)
12503 va_list ap;
12504 tree t;
12506 va_start (ap, n);
12507 t = rewrite_call_expr_valist (loc, call_expr_nargs (exp),
12508 CALL_EXPR_ARGP (exp), skip, fndecl, n, ap);
12509 va_end (ap);
12511 return t;
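/* As a hypothetical example (fndecl2 and newarg are illustrative
   names), applying

     rewrite_call_expr (loc, exp, 2, fndecl2, 1, newarg)

   to a call EXP of the form f (a, b, c, d) builds the call
   fndecl2 (newarg, c, d): the first two arguments of EXP are skipped
   and one new argument is prepended. */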
12514 /* Validate a single argument ARG against a tree code CODE representing
12515 a type. Return true when the argument is valid. */
12517 static bool
12518 validate_arg (const_tree arg, enum tree_code code)
12520 if (!arg)
12521 return false;
12522 else if (code == POINTER_TYPE)
12523 return POINTER_TYPE_P (TREE_TYPE (arg));
12524 else if (code == INTEGER_TYPE)
12525 return INTEGRAL_TYPE_P (TREE_TYPE (arg));
12526 return code == TREE_CODE (TREE_TYPE (arg));
12529 /* This function validates the types of a function call argument list
12530 against a specified list of tree_codes. If the last specifier is a 0,
12531 that represents an ellipsis; otherwise the last specifier must be a
12532 VOID_TYPE.
12534 This is the GIMPLE version of validate_arglist. Eventually we want to
12535 completely convert builtins.c to work from GIMPLEs and the tree based
12536 validate_arglist will then be removed. */
12538 bool
12539 validate_gimple_arglist (const gcall *call, ...)
12541 enum tree_code code;
12542 bool res = false;
12543 va_list ap;
12544 const_tree arg;
12545 size_t i;
12547 va_start (ap, call);
12548 i = 0;
12550 do
12552 code = (enum tree_code) va_arg (ap, int);
12553 switch (code)
12555 case 0:
12556 /* This signifies an ellipsis; any further arguments are all ok. */
12557 res = true;
12558 goto end;
12559 case VOID_TYPE:
12560 /* This signifies an endlink; if no arguments remain, return
12561 true, otherwise return false. */
12562 res = (i == gimple_call_num_args (call));
12563 goto end;
12564 default:
12565 /* If no parameters remain or the parameter's code does not
12566 match the specified code, return false. Otherwise continue
12567 checking any remaining arguments. */
12568 arg = gimple_call_arg (call, i++);
12569 if (!validate_arg (arg, code))
12570 goto end;
12571 break;
12574 while (1);
12576 /* We need gotos here so that there is a single exit point that
12577 calls va_end. */
12578 end: ;
12579 va_end (ap);
12581 return res;
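/* For example, a call expected to look like memchr (s, c, n) could be
   checked with

     validate_gimple_arglist (call, POINTER_TYPE, INTEGER_TYPE,
                              INTEGER_TYPE, VOID_TYPE)

   while a printf-style call taking a format string plus any further
   arguments could use

     validate_gimple_arglist (call, POINTER_TYPE, 0)  */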
12584 /* Default target-specific builtin expander that does nothing. */
12586 rtx
12587 default_expand_builtin (tree exp ATTRIBUTE_UNUSED,
12588 rtx target ATTRIBUTE_UNUSED,
12589 rtx subtarget ATTRIBUTE_UNUSED,
12590 machine_mode mode ATTRIBUTE_UNUSED,
12591 int ignore ATTRIBUTE_UNUSED)
12593 return NULL_RTX;
12596 /* Returns true if EXP represents data that would potentially reside
12597 in a readonly section. */
12599 bool
12600 readonly_data_expr (tree exp)
12602 STRIP_NOPS (exp);
12604 if (TREE_CODE (exp) != ADDR_EXPR)
12605 return false;
12607 exp = get_base_address (TREE_OPERAND (exp, 0));
12608 if (!exp)
12609 return false;
12611 /* Make sure we call decl_readonly_section only for trees it
12612 can handle (since it returns true for everything it doesn't
12613 understand). */
12614 if (TREE_CODE (exp) == STRING_CST
12615 || TREE_CODE (exp) == CONSTRUCTOR
12616 || (VAR_P (exp) && TREE_STATIC (exp)))
12617 return decl_readonly_section (exp, 0);
12618 else
12619 return false;
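/* E.g. the address of a string literal or of a static variable whose
   section is read-only yields true here, while the address of a local
   automatic variable yields false. */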
12622 /* Simplify a call to the strpbrk builtin. S1 and S2 are the arguments
12623 to the call, and TYPE is its return type.
12625 Return NULL_TREE if no simplification was possible, otherwise return the
12626 simplified form of the call as a tree.
12628 The simplified form may be a constant or other expression which
12629 computes the same value, but in a more efficient manner (including
12630 calls to other builtin functions).
12632 The call may contain arguments which need to be evaluated, but
12633 which are not useful to determine the result of the call. In
12634 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12635 COMPOUND_EXPR will be an argument which must be evaluated.
12636 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12637 COMPOUND_EXPR in the chain will contain the tree for the simplified
12638 form of the builtin function call. */
12640 static tree
12641 fold_builtin_strpbrk (location_t loc, tree, tree s1, tree s2, tree type)
12643 if (!validate_arg (s1, POINTER_TYPE)
12644 || !validate_arg (s2, POINTER_TYPE))
12645 return NULL_TREE;
12647 tree fn;
12648 const char *p1, *p2;
12650 p2 = c_getstr (s2);
12651 if (p2 == NULL)
12652 return NULL_TREE;
12654 p1 = c_getstr (s1);
12655 if (p1 != NULL)
12657 const char *r = strpbrk (p1, p2);
12658 tree tem;
12660 if (r == NULL)
12661 return build_int_cst (TREE_TYPE (s1), 0);
12663 /* Return an offset into the constant string argument. */
12664 tem = fold_build_pointer_plus_hwi_loc (loc, s1, r - p1);
12665 return fold_convert_loc (loc, type, tem);
12668 if (p2[0] == '\0')
12669 /* strpbrk(x, "") == NULL.
12670 Evaluate and ignore s1 in case it had side-effects. */
12671 return omit_one_operand_loc (loc, type, integer_zero_node, s1);
12673 if (p2[1] != '\0')
12674 return NULL_TREE; /* Really call strpbrk. */
12676 fn = builtin_decl_implicit (BUILT_IN_STRCHR);
12677 if (!fn)
12678 return NULL_TREE;
12680 /* New argument list transforming strpbrk(s1, s2) to
12681 strchr(s1, s2[0]). */
12682 return build_call_expr_loc (loc, fn, 2, s1,
12683 build_int_cst (integer_type_node, p2[0]));
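/* Examples of the folds above, for a constant second argument:

     strpbrk (s1, "")       -> (char *) 0, s1 still evaluated
     strpbrk ("abcd", "bc") -> "abcd" + 1
     strpbrk (s1, "c")      -> strchr (s1, 'c')  */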
12686 /* Simplify a call to the strspn builtin. S1 and S2 are the arguments
12687 to the call.
12689 Return NULL_TREE if no simplification was possible, otherwise return the
12690 simplified form of the call as a tree.
12692 The simplified form may be a constant or other expression which
12693 computes the same value, but in a more efficient manner (including
12694 calls to other builtin functions).
12696 The call may contain arguments which need to be evaluated, but
12697 which are not useful to determine the result of the call. In
12698 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12699 COMPOUND_EXPR will be an argument which must be evaluated.
12700 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12701 COMPOUND_EXPR in the chain will contain the tree for the simplified
12702 form of the builtin function call. */
12704 static tree
12705 fold_builtin_strspn (location_t loc, tree expr, tree s1, tree s2)
12707 if (!validate_arg (s1, POINTER_TYPE)
12708 || !validate_arg (s2, POINTER_TYPE))
12709 return NULL_TREE;
12711 if (!check_nul_terminated_array (expr, s1)
12712 || !check_nul_terminated_array (expr, s2))
12713 return NULL_TREE;
12715 const char *p1 = c_getstr (s1), *p2 = c_getstr (s2);
12717 /* If either argument is "", return NULL_TREE. */
12718 if ((p1 && *p1 == '\0') || (p2 && *p2 == '\0'))
12719 /* Evaluate and ignore both arguments in case either one has
12720 side-effects. */
12721 return omit_two_operands_loc (loc, size_type_node, size_zero_node,
12722 s1, s2);
12723 return NULL_TREE;
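/* For instance, strspn (s, "") and strspn ("", accept) both fold to
   (size_t) 0 here, with any side effects of the arguments preserved;
   everything else is left unfolded. */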
12726 /* Simplify a call to the strcspn builtin. S1 and S2 are the arguments
12727 to the call.
12729 Return NULL_TREE if no simplification was possible, otherwise return the
12730 simplified form of the call as a tree.
12732 The simplified form may be a constant or other expression which
12733 computes the same value, but in a more efficient manner (including
12734 calls to other builtin functions).
12736 The call may contain arguments which need to be evaluated, but
12737 which are not useful to determine the result of the call. In
12738 this case we return a chain of COMPOUND_EXPRs. The LHS of each
12739 COMPOUND_EXPR will be an argument which must be evaluated.
12740 COMPOUND_EXPRs are chained through their RHS. The RHS of the last
12741 COMPOUND_EXPR in the chain will contain the tree for the simplified
12742 form of the builtin function call. */
12744 static tree
12745 fold_builtin_strcspn (location_t loc, tree expr, tree s1, tree s2)
12747 if (!validate_arg (s1, POINTER_TYPE)
12748 || !validate_arg (s2, POINTER_TYPE))
12749 return NULL_TREE;
12751 if (!check_nul_terminated_array (expr, s1)
12752 || !check_nul_terminated_array (expr, s2))
12753 return NULL_TREE;
12755 /* If the first argument is "", return NULL_TREE. */
12756 const char *p1 = c_getstr (s1);
12757 if (p1 && *p1 == '\0')
12759 /* Evaluate and ignore argument s2 in case it has
12760 side-effects. */
12761 return omit_one_operand_loc (loc, size_type_node,
12762 size_zero_node, s2);
12765 /* If the second argument is "", return __builtin_strlen(s1). */
12766 const char *p2 = c_getstr (s2);
12767 if (p2 && *p2 == '\0')
12769 tree fn = builtin_decl_implicit (BUILT_IN_STRLEN);
12771 /* If the replacement _DECL isn't initialized, don't do the
12772 transformation. */
12773 if (!fn)
12774 return NULL_TREE;
12776 return build_call_expr_loc (loc, fn, 1, s1);
12778 return NULL_TREE;
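/* For instance:

     strcspn ("", reject) -> (size_t) 0, reject still evaluated
     strcspn (s, "")      -> strlen (s)  */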
12781 /* Fold the next_arg or va_start call EXP. Returns true if there was an error
12782 produced. False otherwise. This is done so that we don't output the error
12783 or warning twice or three times. */
12785 bool
12786 fold_builtin_next_arg (tree exp, bool va_start_p)
12788 tree fntype = TREE_TYPE (current_function_decl);
12789 int nargs = call_expr_nargs (exp);
12790 tree arg;
12791 /* There is a good chance the current input_location points inside the
12792 definition of the va_start macro (perhaps on the token for the
12793 builtin) in a system header, so warnings will not be emitted.
12794 Use the location in real source code. */
12795 location_t current_location =
12796 linemap_unwind_to_first_non_reserved_loc (line_table, input_location,
12797 NULL);
12799 if (!stdarg_p (fntype))
12801 error ("%<va_start%> used in function with fixed arguments");
12802 return true;
12805 if (va_start_p)
12807 if (nargs != 2)
12809 error ("wrong number of arguments to function %<va_start%>");
12810 return true;
12812 arg = CALL_EXPR_ARG (exp, 1);
12814 /* We use __builtin_va_start (ap, 0) or __builtin_next_arg (0)
12815 once we have checked the arguments and, if needed, issued a warning. */
12816 else
12818 if (nargs == 0)
12820 /* Evidently an out of date version of <stdarg.h>; can't validate
12821 va_start's second argument, but can still work as intended. */
12822 warning_at (current_location,
12823 OPT_Wvarargs,
12824 "%<__builtin_next_arg%> called without an argument");
12825 return true;
12827 else if (nargs > 1)
12829 error ("wrong number of arguments to function %<__builtin_next_arg%>");
12830 return true;
12832 arg = CALL_EXPR_ARG (exp, 0);
12835 if (TREE_CODE (arg) == SSA_NAME
12836 && SSA_NAME_VAR (arg))
12837 arg = SSA_NAME_VAR (arg);
12839 /* We destructively modify the call to be __builtin_va_start (ap, 0)
12840 or __builtin_next_arg (0) the first time we see it, after checking
12841 the arguments and if needed issuing a warning. */
12842 if (!integer_zerop (arg))
12844 tree last_parm = tree_last (DECL_ARGUMENTS (current_function_decl));
12846 /* Strip off all nops for the sake of the comparison. This
12847 is not quite the same as STRIP_NOPS. It does more.
12848 We must also strip off INDIRECT_REF for C++ reference
12849 parameters. */
12850 while (CONVERT_EXPR_P (arg)
12851 || TREE_CODE (arg) == INDIRECT_REF)
12852 arg = TREE_OPERAND (arg, 0);
12853 if (arg != last_parm)
12855 /* FIXME: Sometimes with the tree optimizers we can end up with
12856 something other than the last argument even though the user
12857 used the last argument. We just warn in that case and keep
12858 going, so the resulting code may be wrong because of
12859 it. */
12860 warning_at (current_location,
12861 OPT_Wvarargs,
12862 "second parameter of %<va_start%> not last named argument");
12865 /* Undefined by C99 7.15.1.4p4 (va_start):
12866 "If the parameter parmN is declared with the register storage
12867 class, with a function or array type, or with a type that is
12868 not compatible with the type that results after application of
12869 the default argument promotions, the behavior is undefined."
12871 else if (DECL_REGISTER (arg))
12873 warning_at (current_location,
12874 OPT_Wvarargs,
12875 "undefined behavior when second parameter of "
12876 "%<va_start%> is declared with %<register%> storage");
12879 /* We want to verify the second parameter just once before the tree
12880 optimizers are run and then avoid keeping it in the tree,
12881 as otherwise we could warn even for correct code like:
12882 void foo (int i, ...)
12883 { va_list ap; i++; va_start (ap, i); va_end (ap); } */
12884 if (va_start_p)
12885 CALL_EXPR_ARG (exp, 1) = integer_zero_node;
12886 else
12887 CALL_EXPR_ARG (exp, 0) = integer_zero_node;
12889 return false;
12893 /* Expand a call EXP to __builtin_object_size. */
12895 static rtx
12896 expand_builtin_object_size (tree exp)
12898 tree ost;
12899 int object_size_type;
12900 tree fndecl = get_callee_fndecl (exp);
12902 if (!validate_arglist (exp, POINTER_TYPE, INTEGER_TYPE, VOID_TYPE))
12904 error ("%Kfirst argument of %qD must be a pointer, second integer constant",
12905 exp, fndecl);
12906 expand_builtin_trap ();
12907 return const0_rtx;
12910 ost = CALL_EXPR_ARG (exp, 1);
12911 STRIP_NOPS (ost);
12913 if (TREE_CODE (ost) != INTEGER_CST
12914 || tree_int_cst_sgn (ost) < 0
12915 || compare_tree_int (ost, 3) > 0)
12917 error ("%Klast argument of %qD is not integer constant between 0 and 3",
12918 exp, fndecl);
12919 expand_builtin_trap ();
12920 return const0_rtx;
12923 object_size_type = tree_to_shwi (ost);
12925 return object_size_type < 2 ? constm1_rtx : const0_rtx;
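/* Reaching this point means the size could not be computed earlier,
   so e.g. __builtin_object_size (p, 0) expands to (size_t) -1, the
   maximum, while __builtin_object_size (p, 2) expands to (size_t) 0,
   the minimum. */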
12928 /* Expand EXP, a call to the __mem{cpy,pcpy,move,set}_chk builtin.
12929 FCODE is the BUILT_IN_* to use.
12930 Return NULL_RTX if we failed; the caller should emit a normal call,
12931 otherwise try to get the result in TARGET, if convenient (and in
12932 mode MODE if that's convenient). */
12934 static rtx
12935 expand_builtin_memory_chk (tree exp, rtx target, machine_mode mode,
12936 enum built_in_function fcode)
12938 if (!validate_arglist (exp,
12939 POINTER_TYPE,
12940 fcode == BUILT_IN_MEMSET_CHK
12941 ? INTEGER_TYPE : POINTER_TYPE,
12942 INTEGER_TYPE, INTEGER_TYPE, VOID_TYPE))
12943 return NULL_RTX;
12945 tree dest = CALL_EXPR_ARG (exp, 0);
12946 tree src = CALL_EXPR_ARG (exp, 1);
12947 tree len = CALL_EXPR_ARG (exp, 2);
12948 tree size = CALL_EXPR_ARG (exp, 3);
12950 /* FIXME: Set access mode to write only for memset et al. */
12951 bool sizes_ok = check_access (exp, len, /*maxread=*/NULL_TREE,
12952 /*srcstr=*/NULL_TREE, size, access_read_write);
12954 if (!tree_fits_uhwi_p (size))
12955 return NULL_RTX;
12957 if (tree_fits_uhwi_p (len) || integer_all_onesp (size))
12959 /* Avoid transforming the checking call to an ordinary one when
12960 an overflow has been detected or when the call couldn't be
12961 validated because the size is not constant. */
12962 if (!sizes_ok && !integer_all_onesp (size) && tree_int_cst_lt (size, len))
12963 return NULL_RTX;
12965 tree fn = NULL_TREE;
12966 /* If __builtin_mem{cpy,pcpy,move,set}_chk is used, assume
12967 mem{cpy,pcpy,move,set} is available. */
12968 switch (fcode)
12970 case BUILT_IN_MEMCPY_CHK:
12971 fn = builtin_decl_explicit (BUILT_IN_MEMCPY);
12972 break;
12973 case BUILT_IN_MEMPCPY_CHK:
12974 fn = builtin_decl_explicit (BUILT_IN_MEMPCPY);
12975 break;
12976 case BUILT_IN_MEMMOVE_CHK:
12977 fn = builtin_decl_explicit (BUILT_IN_MEMMOVE);
12978 break;
12979 case BUILT_IN_MEMSET_CHK:
12980 fn = builtin_decl_explicit (BUILT_IN_MEMSET);
12981 break;
12982 default:
12983 break;
12986 if (! fn)
12987 return NULL_RTX;
12989 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 3, dest, src, len);
12990 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
12991 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
12992 return expand_expr (fn, target, mode, EXPAND_NORMAL);
12994 else if (fcode == BUILT_IN_MEMSET_CHK)
12995 return NULL_RTX;
12996 else
12998 unsigned int dest_align = get_pointer_alignment (dest);
13000 /* If DEST is not a pointer type, call the normal function. */
13001 if (dest_align == 0)
13002 return NULL_RTX;
13004 /* If SRC and DEST are the same (and not volatile), do nothing. */
13005 if (operand_equal_p (src, dest, 0))
13007 tree expr;
13009 if (fcode != BUILT_IN_MEMPCPY_CHK)
13011 /* Evaluate and ignore LEN in case it has side-effects. */
13012 expand_expr (len, const0_rtx, VOIDmode, EXPAND_NORMAL);
13013 return expand_expr (dest, target, mode, EXPAND_NORMAL);
13016 expr = fold_build_pointer_plus (dest, len);
13017 return expand_expr (expr, target, mode, EXPAND_NORMAL);
13020 /* __memmove_chk special case. */
13021 if (fcode == BUILT_IN_MEMMOVE_CHK)
13023 unsigned int src_align = get_pointer_alignment (src);
13025 if (src_align == 0)
13026 return NULL_RTX;
13028 /* If src is categorized for a readonly section we can use
13029 normal __memcpy_chk. */
13030 if (readonly_data_expr (src))
13032 tree fn = builtin_decl_explicit (BUILT_IN_MEMCPY_CHK);
13033 if (!fn)
13034 return NULL_RTX;
13035 fn = build_call_nofold_loc (EXPR_LOCATION (exp), fn, 4,
13036 dest, src, len, size);
13037 gcc_assert (TREE_CODE (fn) == CALL_EXPR);
13038 CALL_EXPR_TAILCALL (fn) = CALL_EXPR_TAILCALL (exp);
13039 return expand_expr (fn, target, mode, EXPAND_NORMAL);
13042 return NULL_RTX;
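/* For example, when the length is a known constant that passes the
   check, __builtin___memcpy_chk (d, s, 32, os) is expanded as a plain
   memcpy (d, s, 32). The same happens when OS is (size_t) -1, i.e.
   when the destination size is unknown, since no overflow can be
   proven in that case. */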
13046 /* Emit warning if a buffer overflow is detected at compile time. */
13048 static void
13049 maybe_emit_chk_warning (tree exp, enum built_in_function fcode)
13051 /* The source string. */
13052 tree srcstr = NULL_TREE;
13053 /* The size of the destination object returned by __builtin_object_size. */
13054 tree objsize = NULL_TREE;
13055 /* The string that is being concatenated with (as in __strcat_chk)
13056 or null if it isn't. */
13057 tree catstr = NULL_TREE;
13058 /* The maximum length of the source sequence in a bounded operation
13059 (such as __strncat_chk) or null if the operation isn't bounded
13060 (such as __strcat_chk). */
13061 tree maxread = NULL_TREE;
13062 /* The exact size of the access (such as in __strncpy_chk). */
13063 tree size = NULL_TREE;
13064 /* The access by the function that's checked. Except for snprintf
13065 both writing and reading are checked. */
13066 access_mode mode = access_read_write;
13068 switch (fcode)
13070 case BUILT_IN_STRCPY_CHK:
13071 case BUILT_IN_STPCPY_CHK:
13072 srcstr = CALL_EXPR_ARG (exp, 1);
13073 objsize = CALL_EXPR_ARG (exp, 2);
13074 break;
13076 case BUILT_IN_STRCAT_CHK:
13077 /* For __strcat_chk the warning will be emitted only if overflowing
13078 by at least strlen (dest) + 1 bytes. */
13079 catstr = CALL_EXPR_ARG (exp, 0);
13080 srcstr = CALL_EXPR_ARG (exp, 1);
13081 objsize = CALL_EXPR_ARG (exp, 2);
13082 break;
13084 case BUILT_IN_STRNCAT_CHK:
13085 catstr = CALL_EXPR_ARG (exp, 0);
13086 srcstr = CALL_EXPR_ARG (exp, 1);
13087 maxread = CALL_EXPR_ARG (exp, 2);
13088 objsize = CALL_EXPR_ARG (exp, 3);
13089 break;
13091 case BUILT_IN_STRNCPY_CHK:
13092 case BUILT_IN_STPNCPY_CHK:
13093 srcstr = CALL_EXPR_ARG (exp, 1);
13094 size = CALL_EXPR_ARG (exp, 2);
13095 objsize = CALL_EXPR_ARG (exp, 3);
13096 break;
13098 case BUILT_IN_SNPRINTF_CHK:
13099 case BUILT_IN_VSNPRINTF_CHK:
13100 maxread = CALL_EXPR_ARG (exp, 1);
13101 objsize = CALL_EXPR_ARG (exp, 3);
13102 /* The only checked access is the write to the destination. */
13103 mode = access_write_only;
13104 break;
13105 default:
13106 gcc_unreachable ();
13109 if (catstr && maxread)
13111 /* Check __strncat_chk. There is no way to determine the length
13112 of the string to which the source string is being appended so
13113 just warn when the length of the source string is not known. */
13114 check_strncat_sizes (exp, objsize);
13115 return;
13118 check_access (exp, size, maxread, srcstr, objsize, mode);
13121 /* Emit warning if a buffer overflow is detected at compile time
13122 in __sprintf_chk/__vsprintf_chk calls. */
13124 static void
13125 maybe_emit_sprintf_chk_warning (tree exp, enum built_in_function fcode)
13127 tree size, len, fmt;
13128 const char *fmt_str;
13129 int nargs = call_expr_nargs (exp);
13131 /* Verify the required arguments in the original call. */
13133 if (nargs < 4)
13134 return;
13135 size = CALL_EXPR_ARG (exp, 2);
13136 fmt = CALL_EXPR_ARG (exp, 3);
13138 if (! tree_fits_uhwi_p (size) || integer_all_onesp (size))
13139 return;
13141 /* Check whether the format is a literal string constant. */
13142 fmt_str = c_getstr (fmt);
13143 if (fmt_str == NULL)
13144 return;
13146 if (!init_target_chars ())
13147 return;
13149 /* If the format doesn't contain % args or %%, we know its size. */
13150 if (strchr (fmt_str, target_percent) == 0)
13151 len = build_int_cstu (size_type_node, strlen (fmt_str));
13152 /* If the format is "%s" and the first ... argument is a string literal,
13153 we know it too. */
13154 else if (fcode == BUILT_IN_SPRINTF_CHK
13155 && strcmp (fmt_str, target_percent_s) == 0)
13157 tree arg;
13159 if (nargs < 5)
13160 return;
13161 arg = CALL_EXPR_ARG (exp, 4);
13162 if (! POINTER_TYPE_P (TREE_TYPE (arg)))
13163 return;
13165 len = c_strlen (arg, 1);
13166 if (!len || ! tree_fits_uhwi_p (len))
13167 return;
13169 else
13170 return;
13172 /* Add one for the terminating nul. */
13173 len = fold_build2 (PLUS_EXPR, TREE_TYPE (len), len, size_one_node);
13175 check_access (exp, /*size=*/NULL_TREE, /*maxread=*/NULL_TREE, len, size,
13176 access_write_only);
13179 /* Return true if FNDECL is an allocation function. Unless
13180 ALL_ALLOC is set, consider only functions that return dynamically
13181 allocated objects. Otherwise return true even for all forms of
13182 alloca (including VLA). */
13184 static bool
13185 fndecl_alloc_p (tree fndecl, bool all_alloc)
13187 if (!fndecl)
13188 return false;
13190 /* A call to operator new isn't recognized as one to a built-in. */
13191 if (DECL_IS_OPERATOR_NEW_P (fndecl))
13192 return true;
13194 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13196 switch (DECL_FUNCTION_CODE (fndecl))
13198 case BUILT_IN_ALLOCA:
13199 case BUILT_IN_ALLOCA_WITH_ALIGN:
13200 return all_alloc;
13201 case BUILT_IN_ALIGNED_ALLOC:
13202 case BUILT_IN_CALLOC:
13203 case BUILT_IN_GOMP_ALLOC:
13204 case BUILT_IN_MALLOC:
13205 case BUILT_IN_REALLOC:
13206 case BUILT_IN_STRDUP:
13207 case BUILT_IN_STRNDUP:
13208 return true;
13209 default:
13210 break;
13214 /* A function is considered an allocation function if it's declared
13215 with attribute malloc with an argument naming its associated
13216 deallocation function. */
13217 tree attrs = DECL_ATTRIBUTES (fndecl);
13218 if (!attrs)
13219 return false;
13221 for (tree allocs = attrs;
13222 (allocs = lookup_attribute ("malloc", allocs));
13223 allocs = TREE_CHAIN (allocs))
13225 tree args = TREE_VALUE (allocs);
13226 if (!args)
13227 continue;
13229 if (TREE_VALUE (args))
13230 return true;
13233 return false;
13236 /* Return true if STMT is a call to an allocation function. A wrapper
13237 around fndecl_alloc_p. */
13239 static bool
13240 gimple_call_alloc_p (gimple *stmt, bool all_alloc = false)
13242 return fndecl_alloc_p (gimple_call_fndecl (stmt), all_alloc);
13245 /* Return the zero-based number corresponding to the argument being
13246 deallocated if EXP is a call to a deallocation function or UINT_MAX
13247 if it isn't. */
13249 static unsigned
13250 call_dealloc_argno (tree exp)
13252 tree fndecl = get_callee_fndecl (exp);
13253 if (!fndecl)
13254 return UINT_MAX;
13256 return fndecl_dealloc_argno (fndecl);
13259 /* Return the zero-based number corresponding to the argument being
13260 deallocated if FNDECL is a deallocation function or UINT_MAX
13261 if it isn't. */
13263 unsigned
13264 fndecl_dealloc_argno (tree fndecl)
13266 /* A call to operator delete isn't recognized as one to a built-in. */
13267 if (DECL_IS_OPERATOR_DELETE_P (fndecl))
13268 return 0;
13270 /* TODO: Handle user-defined functions with attribute malloc? Handle
13271 known non-built-ins like fopen? */
13272 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL))
13274 switch (DECL_FUNCTION_CODE (fndecl))
13276 case BUILT_IN_FREE:
13277 case BUILT_IN_REALLOC:
13278 return 0;
13279 default:
13280 break;
13282 return UINT_MAX;
13285 tree attrs = DECL_ATTRIBUTES (fndecl);
13286 if (!attrs)
13287 return UINT_MAX;
13289 for (tree atfree = attrs;
13290 (atfree = lookup_attribute ("*dealloc", atfree));
13291 atfree = TREE_CHAIN (atfree))
13293 tree alloc = TREE_VALUE (atfree);
13294 if (!alloc)
13295 continue;
13297 tree pos = TREE_CHAIN (alloc);
13298 if (!pos)
13299 return 0;
13301 pos = TREE_VALUE (pos);
13302 return TREE_INT_CST_LOW (pos) - 1;
13305 return UINT_MAX;
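/* The user-level spelling behind the internal "*dealloc" attribute is
   the two-argument form of attribute malloc on the allocator, e.g.
   for a hypothetical pair:

     void my_free (void *);
     void *my_alloc (size_t)
       __attribute__ ((malloc, malloc (my_free, 1)));

   my_free releases the pointer passed as its argument 1, so for
   my_free this function would return 0. */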
13308 /* Return true if DELC doesn't refer to an operator delete that's
13309 suitable to call with a pointer returned from the operator new
13310 described by NEWC. */
13312 static bool
13313 new_delete_mismatch_p (const demangle_component &newc,
13314 const demangle_component &delc)
13316 if (newc.type != delc.type)
13317 return true;
13319 switch (newc.type)
13321 case DEMANGLE_COMPONENT_NAME:
13323 int len = newc.u.s_name.len;
13324 const char *news = newc.u.s_name.s;
13325 const char *dels = delc.u.s_name.s;
13326 if (len != delc.u.s_name.len || memcmp (news, dels, len))
13327 return true;
13329 if (news[len] == 'n')
13331 if (news[len + 1] == 'a')
13332 return dels[len] != 'd' || dels[len + 1] != 'a';
13333 if (news[len + 1] == 'w')
13334 return dels[len] != 'd' || dels[len + 1] != 'l';
13336 return false;
13339 case DEMANGLE_COMPONENT_OPERATOR:
13340 /* Operator mismatches are handled above. */
13341 return false;
13343 case DEMANGLE_COMPONENT_EXTENDED_OPERATOR:
13344 if (newc.u.s_extended_operator.args != delc.u.s_extended_operator.args)
13345 return true;
13346 return new_delete_mismatch_p (*newc.u.s_extended_operator.name,
13347 *delc.u.s_extended_operator.name);
13349 case DEMANGLE_COMPONENT_FIXED_TYPE:
13350 if (newc.u.s_fixed.accum != delc.u.s_fixed.accum
13351 || newc.u.s_fixed.sat != delc.u.s_fixed.sat)
13352 return true;
13353 return new_delete_mismatch_p (*newc.u.s_fixed.length,
13354 *delc.u.s_fixed.length);
13356 case DEMANGLE_COMPONENT_CTOR:
13357 if (newc.u.s_ctor.kind != delc.u.s_ctor.kind)
13358 return true;
13359 return new_delete_mismatch_p (*newc.u.s_ctor.name,
13360 *delc.u.s_ctor.name);
13362 case DEMANGLE_COMPONENT_DTOR:
13363 if (newc.u.s_dtor.kind != delc.u.s_dtor.kind)
13364 return true;
13365 return new_delete_mismatch_p (*newc.u.s_dtor.name,
13366 *delc.u.s_dtor.name);
13368 case DEMANGLE_COMPONENT_BUILTIN_TYPE:
13370 /* The demangler API provides no better way to compare built-in
13371 types except by comparing their demangled names. */
13372 size_t nsz, dsz;
13373 demangle_component *pnc = const_cast<demangle_component *>(&newc);
13374 demangle_component *pdc = const_cast<demangle_component *>(&delc);
13375 char *nts = cplus_demangle_print (0, pnc, 16, &nsz);
13376 char *dts = cplus_demangle_print (0, pdc, 16, &dsz);
13377 if (!nts != !dts)
13378 return true;
13379 bool mismatch = nts && strcmp (nts, dts) != 0; /* Both may be null. */
13380 free (nts);
13381 free (dts);
13382 return mismatch;
13385 case DEMANGLE_COMPONENT_SUB_STD:
13386 if (newc.u.s_string.len != delc.u.s_string.len)
13387 return true;
13388 return memcmp (newc.u.s_string.string, delc.u.s_string.string,
13389 newc.u.s_string.len);
13391 case DEMANGLE_COMPONENT_FUNCTION_PARAM:
13392 case DEMANGLE_COMPONENT_TEMPLATE_PARAM:
13393 return newc.u.s_number.number != delc.u.s_number.number;
13395 case DEMANGLE_COMPONENT_CHARACTER:
13396 return newc.u.s_character.character != delc.u.s_character.character;
13398 case DEMANGLE_COMPONENT_DEFAULT_ARG:
13399 case DEMANGLE_COMPONENT_LAMBDA:
13400 if (newc.u.s_unary_num.num != delc.u.s_unary_num.num)
13401 return true;
13402 return new_delete_mismatch_p (*newc.u.s_unary_num.sub,
13403 *delc.u.s_unary_num.sub);
13404 default:
13405 break;
13408 if (!newc.u.s_binary.left != !delc.u.s_binary.left)
13409 return true;
13411 if (!newc.u.s_binary.left)
13412 return false;
13414 if (new_delete_mismatch_p (*newc.u.s_binary.left, *delc.u.s_binary.left)
13415 || !newc.u.s_binary.right != !delc.u.s_binary.right)
13416 return true;
13418 if (newc.u.s_binary.right)
13419 return new_delete_mismatch_p (*newc.u.s_binary.right,
13420 *delc.u.s_binary.right);
13421 return false;
13424 /* Return true if DELETE_DECL is an operator delete that's not suitable
13425 to call with a pointer returned from NEW_DECL. */
13427 static bool
13428 new_delete_mismatch_p (tree new_decl, tree delete_decl)
13430 tree new_name = DECL_ASSEMBLER_NAME (new_decl);
13431 tree delete_name = DECL_ASSEMBLER_NAME (delete_decl);
13433 /* valid_new_delete_pair_p() returns a conservative result (currently
13434 it only handles global operators). A true result is reliable but
13435 a false result doesn't necessarily mean the operators don't match. */
13436 if (valid_new_delete_pair_p (new_name, delete_name))
13437 return false;
13439 /* For anything not handled by valid_new_delete_pair_p() such as member
13440 operators compare the individual demangled components of the mangled
13441 name. */
13442 const char *new_str = IDENTIFIER_POINTER (new_name);
13443 const char *del_str = IDENTIFIER_POINTER (delete_name);
13445 void *np = NULL, *dp = NULL;
13446 demangle_component *ndc = cplus_demangle_v3_components (new_str, 0, &np);
13447 demangle_component *ddc = cplus_demangle_v3_components (del_str, 0, &dp);
13448 bool mismatch = new_delete_mismatch_p (*ndc, *ddc);
13449 free (np);
13450 free (dp);
13451 return mismatch;
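/* For example, a pointer obtained from the array form
     T *p = new T[n];     (operator new[])
   must be released with the array form
     delete[] p;          (operator delete[])
   and pairing it with the scalar operator delete is reported as a
   mismatch. */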
13454 /* ALLOC_DECL and DEALLOC_DECL are pair of allocation and deallocation
13455 functions. Return true if the latter is suitable to deallocate objects
13456 allocated by calls to the former. */
13458 static bool
13459 matching_alloc_calls_p (tree alloc_decl, tree dealloc_decl)
13461 /* Set to alloc_kind_t::builtin if ALLOC_DECL is associated with
13462 a built-in deallocator. */
13463 enum class alloc_kind_t { none, builtin, user }
13464 alloc_dealloc_kind = alloc_kind_t::none;
13466 if (DECL_IS_OPERATOR_NEW_P (alloc_decl))
13468 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13469 /* Return true iff both functions are of the same array or
13470 singleton form and false otherwise. */
13471 return !new_delete_mismatch_p (alloc_decl, dealloc_decl);
13473 /* Return false for deallocation functions that are known not
13474 to match. */
13475 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13476 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13477 return false;
13478 /* Otherwise proceed below to check the deallocation function's
13479 "*dealloc" attributes to look for one that mentions this operator
13480 new. */
13482 else if (fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL))
13484 switch (DECL_FUNCTION_CODE (alloc_decl))
13486 case BUILT_IN_ALLOCA:
13487 case BUILT_IN_ALLOCA_WITH_ALIGN:
13488 return false;
13490 case BUILT_IN_ALIGNED_ALLOC:
13491 case BUILT_IN_CALLOC:
13492 case BUILT_IN_GOMP_ALLOC:
13493 case BUILT_IN_MALLOC:
13494 case BUILT_IN_REALLOC:
13495 case BUILT_IN_STRDUP:
13496 case BUILT_IN_STRNDUP:
13497 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl))
13498 return false;
13500 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_FREE)
13501 || fndecl_built_in_p (dealloc_decl, BUILT_IN_REALLOC))
13502 return true;
13504 alloc_dealloc_kind = alloc_kind_t::builtin;
13505 break;
13507 default:
13508 break;
13512 /* Set if DEALLOC_DECL both allocates and deallocates. */
13513 alloc_kind_t realloc_kind = alloc_kind_t::none;
13515 if (fndecl_built_in_p (dealloc_decl, BUILT_IN_NORMAL))
13517 built_in_function dealloc_code = DECL_FUNCTION_CODE (dealloc_decl);
13518 if (dealloc_code == BUILT_IN_REALLOC)
13519 realloc_kind = alloc_kind_t::builtin;
13521 for (tree amats = DECL_ATTRIBUTES (alloc_decl);
13522 (amats = lookup_attribute ("malloc", amats));
13523 amats = TREE_CHAIN (amats))
13525 tree args = TREE_VALUE (amats);
13526 if (!args)
13527 continue;
13529 tree fndecl = TREE_VALUE (args);
13530 if (!fndecl || !DECL_P (fndecl))
13531 continue;
13533 if (fndecl_built_in_p (fndecl, BUILT_IN_NORMAL)
13534 && dealloc_code == DECL_FUNCTION_CODE (fndecl))
13535 return true;
13539 const bool alloc_builtin = fndecl_built_in_p (alloc_decl, BUILT_IN_NORMAL);
13540 alloc_kind_t realloc_dealloc_kind = alloc_kind_t::none;
13542 /* If DEALLOC_DECL has an internal "*dealloc" attribute scan the list
13543 of its associated allocation functions for ALLOC_DECL.
13544 If the corresponding ALLOC_DECL is found they're a matching pair,
13545 otherwise they're not.
13546 With DDATS set to the Deallocator's *Dealloc ATtributes... */
13547 for (tree ddats = DECL_ATTRIBUTES (dealloc_decl);
13548 (ddats = lookup_attribute ("*dealloc", ddats));
13549 ddats = TREE_CHAIN (ddats))
13551 tree args = TREE_VALUE (ddats);
13552 if (!args)
13553 continue;
13555 tree alloc = TREE_VALUE (args);
13556 if (!alloc)
13557 continue;
13559 if (alloc == DECL_NAME (dealloc_decl))
13560 realloc_kind = alloc_kind_t::user;
13562 if (DECL_P (alloc))
13564 gcc_checking_assert (fndecl_built_in_p (alloc, BUILT_IN_NORMAL));
13566 switch (DECL_FUNCTION_CODE (alloc))
13568 case BUILT_IN_ALIGNED_ALLOC:
13569 case BUILT_IN_CALLOC:
13570 case BUILT_IN_GOMP_ALLOC:
13571 case BUILT_IN_MALLOC:
13572 case BUILT_IN_REALLOC:
13573 case BUILT_IN_STRDUP:
13574 case BUILT_IN_STRNDUP:
13575 realloc_dealloc_kind = alloc_kind_t::builtin;
13576 break;
13577 default:
13578 break;
13581 if (!alloc_builtin)
13582 continue;
13584 if (DECL_FUNCTION_CODE (alloc) != DECL_FUNCTION_CODE (alloc_decl))
13585 continue;
13587 return true;
13590 if (alloc == DECL_NAME (alloc_decl))
13591 return true;
13594 if (realloc_kind == alloc_kind_t::none)
13595 return false;
13597 hash_set<tree> common_deallocs;
13598 /* Special handling for deallocators. Iterate over both the allocator's
13599 and the reallocator's associated deallocator functions looking for
13600 the first one in common. If one is found, the de/reallocator is
13601 a match for the allocator even though the latter isn't directly
13602 associated with the former. This simplifies declarations in system
13603 headers.
13604 With AMATS set to the Allocator's Malloc ATtributes,
13605 and RMATS set to Reallocator's Malloc ATtributes... */
13606 for (tree amats = DECL_ATTRIBUTES (alloc_decl),
13607 rmats = DECL_ATTRIBUTES (dealloc_decl);
13608 (amats = lookup_attribute ("malloc", amats))
13609 || (rmats = lookup_attribute ("malloc", rmats));
13610 amats = amats ? TREE_CHAIN (amats) : NULL_TREE,
13611 rmats = rmats ? TREE_CHAIN (rmats) : NULL_TREE)
13613 if (tree args = amats ? TREE_VALUE (amats) : NULL_TREE)
13614 if (tree adealloc = TREE_VALUE (args))
13616 if (DECL_P (adealloc)
13617 && fndecl_built_in_p (adealloc, BUILT_IN_NORMAL))
13619 built_in_function fncode = DECL_FUNCTION_CODE (adealloc);
13620 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13622 if (realloc_kind == alloc_kind_t::builtin)
13623 return true;
13624 alloc_dealloc_kind = alloc_kind_t::builtin;
13626 continue;
13629 common_deallocs.add (adealloc);
13632 if (tree args = rmats ? TREE_VALUE (rmats) : NULL_TREE)
13633 if (tree ddealloc = TREE_VALUE (args))
13635 if (DECL_P (ddealloc)
13636 && fndecl_built_in_p (ddealloc, BUILT_IN_NORMAL))
13638 built_in_function fncode = DECL_FUNCTION_CODE (ddealloc);
13639 if (fncode == BUILT_IN_FREE || fncode == BUILT_IN_REALLOC)
13641 if (alloc_dealloc_kind == alloc_kind_t::builtin)
13642 return true;
13643 realloc_dealloc_kind = alloc_kind_t::builtin;
13645 continue;
13648 if (common_deallocs.add (ddealloc))
13649 return true;
13653 /* Succeed only if ALLOC_DECL and the reallocator DEALLOC_DECL share
13654 a built-in deallocator. */
13655 return (alloc_dealloc_kind == alloc_kind_t::builtin
13656 && realloc_dealloc_kind == alloc_kind_t::builtin);
13659 /* Return true if DEALLOC_DECL is a function suitable to deallocate
13660 objects allocated by the ALLOC call. */
13662 static bool
13663 matching_alloc_calls_p (gimple *alloc, tree dealloc_decl)
13665 tree alloc_decl = gimple_call_fndecl (alloc);
13666 if (!alloc_decl)
13667 return true;
13669 return matching_alloc_calls_p (alloc_decl, dealloc_decl);
13672 /* Diagnose a call EXP to deallocate a pointer referenced by AREF if it
13673 includes a nonzero offset. Such a pointer cannot refer to the beginning
13674 of an allocated object. A negative offset may refer to it only if
13675 the target pointer is unknown. */
13677 static bool
13678 warn_dealloc_offset (location_t loc, tree exp, const access_ref &aref)
13680 if (aref.deref || aref.offrng[0] <= 0 || aref.offrng[1] <= 0)
13681 return false;
13683 tree dealloc_decl = get_callee_fndecl (exp);
13684 if (!dealloc_decl)
13685 return false;
13687 if (DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13688 && !DECL_IS_REPLACEABLE_OPERATOR (dealloc_decl))
13690 /* A call to a user-defined operator delete with a pointer plus offset
13691 may be valid if it's returned from an unknown function (i.e., one
13692 that's not operator new). */
13693 if (TREE_CODE (aref.ref) == SSA_NAME)
13695 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13696 if (is_gimple_call (def_stmt))
13698 tree alloc_decl = gimple_call_fndecl (def_stmt);
13699 if (!alloc_decl || !DECL_IS_OPERATOR_NEW_P (alloc_decl))
13700 return false;
13705 char offstr[80];
13706 offstr[0] = '\0';
13707 if (wi::fits_shwi_p (aref.offrng[0]))
13709 if (aref.offrng[0] == aref.offrng[1]
13710 || !wi::fits_shwi_p (aref.offrng[1]))
13711 sprintf (offstr, " %lli",
13712 (long long)aref.offrng[0].to_shwi ());
13713 else
13714 sprintf (offstr, " [%lli, %lli]",
13715 (long long)aref.offrng[0].to_shwi (),
13716 (long long)aref.offrng[1].to_shwi ());
13719 if (!warning_at (loc, OPT_Wfree_nonheap_object,
13720 "%K%qD called on pointer %qE with nonzero offset%s",
13721 exp, dealloc_decl, aref.ref, offstr))
13722 return false;
13724 if (DECL_P (aref.ref))
13725 inform (DECL_SOURCE_LOCATION (aref.ref), "declared here");
13726 else if (TREE_CODE (aref.ref) == SSA_NAME)
13728 gimple *def_stmt = SSA_NAME_DEF_STMT (aref.ref);
13729 if (is_gimple_call (def_stmt))
13731 location_t def_loc = gimple_location (def_stmt);
13732 tree alloc_decl = gimple_call_fndecl (def_stmt);
13733 if (alloc_decl)
13734 inform (def_loc,
13735 "returned from %qD", alloc_decl);
13736 else if (tree alloc_fntype = gimple_call_fntype (def_stmt))
13737 inform (def_loc,
13738 "returned from %qT", alloc_fntype);
13739 else
13740 inform (def_loc, "obtained here");
13744 return true;
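/* For instance, given
     char *p = (char *) malloc (8);
     free (p + 1);
   the offset of 1 means p + 1 cannot point to the start of the
   allocated object, so -Wfree-nonheap-object triggers here. */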
13747 /* Issue a warning if a deallocation function such as free, realloc,
13748 or C++ operator delete is called with an argument not returned by
13749 a matching allocation function such as malloc or the corresponding
13750 form of C++ operator new. */
13752 void
13753 maybe_emit_free_warning (tree exp)
13755 tree fndecl = get_callee_fndecl (exp);
13756 if (!fndecl)
13757 return;
13759 unsigned argno = call_dealloc_argno (exp);
13760 if ((unsigned) call_expr_nargs (exp) <= argno)
13761 return;
13763 tree ptr = CALL_EXPR_ARG (exp, argno);
13764 if (integer_zerop (ptr))
13765 return;
13767 access_ref aref;
13768 if (!compute_objsize (ptr, 0, &aref))
13769 return;
13771 tree ref = aref.ref;
13772 if (integer_zerop (ref))
13773 return;
13775 tree dealloc_decl = get_callee_fndecl (exp);
13776 location_t loc = tree_inlined_location (exp);
13778 if (DECL_P (ref) || EXPR_P (ref))
13780 /* Diagnose freeing a declared object. */
13781 if (aref.ref_declared ()
13782 && warning_at (loc, OPT_Wfree_nonheap_object,
13783 "%K%qD called on unallocated object %qD",
13784 exp, dealloc_decl, ref))
13786 loc = (DECL_P (ref)
13787 ? DECL_SOURCE_LOCATION (ref)
13788 : EXPR_LOCATION (ref));
13789 inform (loc, "declared here");
13790 return;
13793 /* Diagnose freeing a pointer that includes a positive offset.
13794 Such a pointer cannot refer to the beginning of an allocated
13795 object. A negative offset may refer to it. */
13796 if (aref.sizrng[0] != aref.sizrng[1]
13797 && warn_dealloc_offset (loc, exp, aref))
13798 return;
13800 else if (CONSTANT_CLASS_P (ref))
13802 if (warning_at (loc, OPT_Wfree_nonheap_object,
13803 "%K%qD called on a pointer to an unallocated "
13804 "object %qE", exp, dealloc_decl, ref))
13806 if (TREE_CODE (ptr) == SSA_NAME)
13808 gimple *def_stmt = SSA_NAME_DEF_STMT (ptr);
13809 if (is_gimple_assign (def_stmt))
13811 location_t loc = gimple_location (def_stmt);
13812 inform (loc, "assigned here");
13815 return;
13818 else if (TREE_CODE (ref) == SSA_NAME)
13820 /* Also warn if the pointer argument refers to the result
13821 of an allocation call like alloca or VLA. */
13822 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
13823 if (is_gimple_call (def_stmt))
13825 bool warned = false;
13826 if (gimple_call_alloc_p (def_stmt))
13828 if (matching_alloc_calls_p (def_stmt, dealloc_decl))
13830 if (warn_dealloc_offset (loc, exp, aref))
13831 return;
13833 else
13835 tree alloc_decl = gimple_call_fndecl (def_stmt);
13836 int opt = (DECL_IS_OPERATOR_NEW_P (alloc_decl)
13837 || DECL_IS_OPERATOR_DELETE_P (dealloc_decl)
13838 ? OPT_Wmismatched_new_delete
13839 : OPT_Wmismatched_dealloc);
13840 warned = warning_at (loc, opt,
13841 "%K%qD called on pointer returned "
13842 "from a mismatched allocation "
13843 "function", exp, dealloc_decl);
13846 else if (gimple_call_builtin_p (def_stmt, BUILT_IN_ALLOCA)
13847 || gimple_call_builtin_p (def_stmt,
13848 BUILT_IN_ALLOCA_WITH_ALIGN))
13849 warned = warning_at (loc, OPT_Wfree_nonheap_object,
13850 "%K%qD called on pointer to "
13851 "an unallocated object",
13852 exp, dealloc_decl);
13853 else if (warn_dealloc_offset (loc, exp, aref))
13854 return;
13856 if (warned)
13858 tree fndecl = gimple_call_fndecl (def_stmt);
13859 inform (gimple_location (def_stmt),
13860 "returned from %qD", fndecl);
13861 return;
13864 else if (gimple_nop_p (def_stmt))
13866 ref = SSA_NAME_VAR (ref);
13867 /* Diagnose freeing a pointer that includes a positive offset. */
13868 if (TREE_CODE (ref) == PARM_DECL
13869 && !aref.deref
13870 && aref.sizrng[0] != aref.sizrng[1]
13871 && aref.offrng[0] > 0 && aref.offrng[1] > 0
13872 && warn_dealloc_offset (loc, exp, aref))
13873 return;
13878 /* Fold a call to __builtin_object_size with arguments PTR and OST,
13879 if possible. */
13881 static tree
13882 fold_builtin_object_size (tree ptr, tree ost)
13884 unsigned HOST_WIDE_INT bytes;
13885 int object_size_type;
13887 if (!validate_arg (ptr, POINTER_TYPE)
13888 || !validate_arg (ost, INTEGER_TYPE))
13889 return NULL_TREE;
13891 STRIP_NOPS (ost);
13893 if (TREE_CODE (ost) != INTEGER_CST
13894 || tree_int_cst_sgn (ost) < 0
13895 || compare_tree_int (ost, 3) > 0)
13896 return NULL_TREE;
13898 object_size_type = tree_to_shwi (ost);
13900 /* __builtin_object_size doesn't evaluate side-effects in its arguments;
13901 if there are any side-effects, it returns (size_t) -1 for types 0 and 1
13902 and (size_t) 0 for types 2 and 3. */
13903 if (TREE_SIDE_EFFECTS (ptr))
13904 return build_int_cst_type (size_type_node, object_size_type < 2 ? -1 : 0);
13906 if (TREE_CODE (ptr) == ADDR_EXPR)
13908 compute_builtin_object_size (ptr, object_size_type, &bytes);
13909 if (wi::fits_to_tree_p (bytes, size_type_node))
13910 return build_int_cstu (size_type_node, bytes);
13912 else if (TREE_CODE (ptr) == SSA_NAME)
13914 /* If object size is not known yet, delay folding until
13915 later. Maybe subsequent passes will help determine
13916 it. */
13917 if (compute_builtin_object_size (ptr, object_size_type, &bytes)
13918 && wi::fits_to_tree_p (bytes, size_type_node))
13919 return build_int_cstu (size_type_node, bytes);
13922 return NULL_TREE;
13925 /* Builtins with folding operations that operate on "..." arguments
13926 need special handling; we need to store the arguments in a convenient
13927 data structure before attempting any folding. Fortunately there are
13928 only a few builtins that fall into this category. FNDECL is the
13929 function; ARGS points to its NARGS arguments. */
13931 static tree
13932 fold_builtin_varargs (location_t loc, tree fndecl, tree *args, int nargs)
13934 enum built_in_function fcode = DECL_FUNCTION_CODE (fndecl);
13935 tree ret = NULL_TREE;
13937 switch (fcode)
13939 case BUILT_IN_FPCLASSIFY:
13940 ret = fold_builtin_fpclassify (loc, args, nargs);
13941 break;
13943 default:
13944 break;
13946 if (ret)
13948 ret = build1 (NOP_EXPR, TREE_TYPE (ret), ret);
13949 SET_EXPR_LOCATION (ret, loc);
13950 TREE_NO_WARNING (ret) = 1;
13951 return ret;
13953 return NULL_TREE;
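/* __builtin_fpclassify is the only case handled above; such a call
   has the form

     __builtin_fpclassify (FP_NAN, FP_INFINITE, FP_NORMAL,
                           FP_SUBNORMAL, FP_ZERO, x)

   where the first five arguments are the values to return for each
   possible classification of x. */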
13956 /* Initialize format string characters in the target charset. */
13958 bool
13959 init_target_chars (void)
13961 static bool init;
13962 if (!init)
13964 target_newline = lang_hooks.to_target_charset ('\n');
13965 target_percent = lang_hooks.to_target_charset ('%');
13966 target_c = lang_hooks.to_target_charset ('c');
13967 target_s = lang_hooks.to_target_charset ('s');
13968 if (target_newline == 0 || target_percent == 0 || target_c == 0
13969 || target_s == 0)
13970 return false;
13972 target_percent_c[0] = target_percent;
13973 target_percent_c[1] = target_c;
13974 target_percent_c[2] = '\0';
13976 target_percent_s[0] = target_percent;
13977 target_percent_s[1] = target_s;
13978 target_percent_s[2] = '\0';
13980 target_percent_s_newline[0] = target_percent;
13981 target_percent_s_newline[1] = target_s;
13982 target_percent_s_newline[2] = target_newline;
13983 target_percent_s_newline[3] = '\0';
13985 init = true;
13987 return true;
13990 /* Helper function for do_mpfr_arg*(). Ensure M is a normal number
13991 and no overflow/underflow occurred. INEXACT is true if M was not
13992 exactly calculated. TYPE is the tree type for the result. This
13993 function assumes that you cleared the MPFR flags and then
13994 calculated M to see if anything subsequently set a flag prior to
13995 entering this function. Return NULL_TREE if any checks fail. */
13997 static tree
13998 do_mpfr_ckconv (mpfr_srcptr m, tree type, int inexact)
14000 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
14001 overflow/underflow occurred. If -frounding-math, proceed iff the
14002 result of calling FUNC was exact. */
14003 if (mpfr_number_p (m) && !mpfr_overflow_p () && !mpfr_underflow_p ()
14004 && (!flag_rounding_math || !inexact))
14006 REAL_VALUE_TYPE rr;
14008 real_from_mpfr (&rr, m, type, MPFR_RNDN);
14009 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR value,
14010 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
14011 but the mpfr_t is not, then we underflowed in the
14012 conversion. */
14013 if (real_isfinite (&rr)
14014 && (rr.cl == rvc_zero) == (mpfr_zero_p (m) != 0))
14016 REAL_VALUE_TYPE rmode;
14018 real_convert (&rmode, TYPE_MODE (type), &rr);
14019 /* Proceed iff the specified mode can hold the value. */
14020 if (real_identical (&rmode, &rr))
14021 return build_real (type, rmode);
14024 return NULL_TREE;
14027 /* Helper function for do_mpc_arg*(). Ensure M is a normal complex
14028 number and no overflow/underflow occurred. INEXACT is true if M
14029 was not exactly calculated. TYPE is the tree type for the result.
14030 This function assumes that you cleared the MPFR flags and then
14031 calculated M to see if anything subsequently set a flag prior to
14032 entering this function. Return NULL_TREE if any checks fail; if
14033 FORCE_CONVERT is true, then bypass the checks. */
14035 static tree
14036 do_mpc_ckconv (mpc_srcptr m, tree type, int inexact, int force_convert)
14038 /* Proceed iff we get a normal number, i.e. not NaN or Inf and no
14039 overflow/underflow occurred. If -frounding-math, proceed iff the
14040 result of calling FUNC was exact. */
14041 if (force_convert
14042 || (mpfr_number_p (mpc_realref (m)) && mpfr_number_p (mpc_imagref (m))
14043 && !mpfr_overflow_p () && !mpfr_underflow_p ()
14044 && (!flag_rounding_math || !inexact)))
14046 REAL_VALUE_TYPE re, im;
14048 real_from_mpfr (&re, mpc_realref (m), TREE_TYPE (type), MPFR_RNDN);
14049 real_from_mpfr (&im, mpc_imagref (m), TREE_TYPE (type), MPFR_RNDN);
14050 /* Proceed iff GCC's REAL_VALUE_TYPE can hold the MPFR values,
14051 check for overflow/underflow. If the REAL_VALUE_TYPE is zero
14052 but the mpfr_t is not, then we underflowed in the
14053 conversion. */
14054 if (force_convert
14055 || (real_isfinite (&re) && real_isfinite (&im)
14056 && (re.cl == rvc_zero) == (mpfr_zero_p (mpc_realref (m)) != 0)
14057 && (im.cl == rvc_zero) == (mpfr_zero_p (mpc_imagref (m)) != 0)))
14059 REAL_VALUE_TYPE re_mode, im_mode;
14061 real_convert (&re_mode, TYPE_MODE (TREE_TYPE (type)), &re);
14062 real_convert (&im_mode, TYPE_MODE (TREE_TYPE (type)), &im);
14063 /* Proceed iff the specified mode can hold the value. */
14064 if (force_convert
14065 || (real_identical (&re_mode, &re)
14066 && real_identical (&im_mode, &im)))
14067 return build_complex (type, build_real (TREE_TYPE (type), re_mode),
14068 build_real (TREE_TYPE (type), im_mode));
14071 return NULL_TREE;
14074 /* If arguments ARG0 and ARG1 are REAL_CSTs, call mpfr_remquo() to set
14075 the value pointed to by ARG_QUO and return the result. The type is taken
14076 from the type of ARG0 and is used for setting the precision of the
14077 calculation and results. */
14079 static tree
14080 do_mpfr_remquo (tree arg0, tree arg1, tree arg_quo)
14082 tree const type = TREE_TYPE (arg0);
14083 tree result = NULL_TREE;
14085 STRIP_NOPS (arg0);
14086 STRIP_NOPS (arg1);
14088 /* To proceed, MPFR must exactly represent the target floating point
14089 format, which only happens when the target base equals two. */
14090 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14091 && TREE_CODE (arg0) == REAL_CST && !TREE_OVERFLOW (arg0)
14092 && TREE_CODE (arg1) == REAL_CST && !TREE_OVERFLOW (arg1))
14094 const REAL_VALUE_TYPE *const ra0 = TREE_REAL_CST_PTR (arg0);
14095 const REAL_VALUE_TYPE *const ra1 = TREE_REAL_CST_PTR (arg1);
14097 if (real_isfinite (ra0) && real_isfinite (ra1))
14099 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14100 const int prec = fmt->p;
14101 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
14102 tree result_rem;
14103 long integer_quo;
14104 mpfr_t m0, m1;
14106 mpfr_inits2 (prec, m0, m1, NULL);
14107 mpfr_from_real (m0, ra0, MPFR_RNDN);
14108 mpfr_from_real (m1, ra1, MPFR_RNDN);
14109 mpfr_clear_flags ();
14110 mpfr_remquo (m0, &integer_quo, m0, m1, rnd);
14111 /* Remquo is independent of the rounding mode, so pass
14112 inexact=0 to do_mpfr_ckconv(). */
14113 result_rem = do_mpfr_ckconv (m0, type, /*inexact=*/ 0);
14114 mpfr_clears (m0, m1, NULL);
14115 if (result_rem)
14117 /* MPFR calculates quo in the host's long so it may
14118 return more bits in quo than the target int can hold
14119 if sizeof(host long) > sizeof(target int). This can
14120 happen even for native compilers in LP64 mode. In
14121 these cases, modulo the quo value with the largest
14122 number that the target int can hold while leaving one
14123 bit for the sign. */
14124 if (sizeof (integer_quo) * CHAR_BIT > INT_TYPE_SIZE)
14125 integer_quo %= (long)(1UL << (INT_TYPE_SIZE - 1));
14127 /* Dereference the quo pointer argument. */
14128 arg_quo = build_fold_indirect_ref (arg_quo);
14129 /* Proceed iff a valid pointer type was passed in. */
14130 if (TYPE_MAIN_VARIANT (TREE_TYPE (arg_quo)) == integer_type_node)
14132 /* Set the value. */
14133 tree result_quo
14134 = fold_build2 (MODIFY_EXPR, TREE_TYPE (arg_quo), arg_quo,
14135 build_int_cst (TREE_TYPE (arg_quo),
14136 integer_quo));
14137 TREE_SIDE_EFFECTS (result_quo) = 1;
14138 /* Combine the quo assignment with the rem. */
14139 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14140 result_quo, result_rem));
14145 return result;
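/* A worked constant example: folding remquo (5.0, 3.0, &q) rounds the
   quotient to nearest, 5/3 -> 2, so the remainder is 5 - 2*3 = -1.0
   and *q is set to 2. */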
14148 /* If ARG is a REAL_CST, call mpfr_lgamma() on it and return the
14149 resulting value as a tree with type TYPE. The mpfr precision is
14150 set to the precision of TYPE. We assume that this mpfr function
14151 returns zero if the result could be calculated exactly within the
14152 requested precision. In addition, the integer pointer represented
14153 by ARG_SG will be dereferenced and set to the appropriate signgam
14154 (-1,1) value. */
14156 static tree
14157 do_mpfr_lgamma_r (tree arg, tree arg_sg, tree type)
14159 tree result = NULL_TREE;
14161 STRIP_NOPS (arg);
14163 /* To proceed, MPFR must exactly represent the target floating point
14164 format, which only happens when the target base equals two. Also
14165 verify ARG is a constant and that ARG_SG is an int pointer. */
14166 if (REAL_MODE_FORMAT (TYPE_MODE (type))->b == 2
14167 && TREE_CODE (arg) == REAL_CST && !TREE_OVERFLOW (arg)
14168 && TREE_CODE (TREE_TYPE (arg_sg)) == POINTER_TYPE
14169 && TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (arg_sg))) == integer_type_node)
14171 const REAL_VALUE_TYPE *const ra = TREE_REAL_CST_PTR (arg);
14173 /* In addition to NaN and Inf, the argument cannot be zero or a
14174 negative integer. */
14175 if (real_isfinite (ra)
14176 && ra->cl != rvc_zero
14177 && !(real_isneg (ra) && real_isinteger (ra, TYPE_MODE (type))))
14179 const struct real_format *fmt = REAL_MODE_FORMAT (TYPE_MODE (type));
14180 const int prec = fmt->p;
14181 const mpfr_rnd_t rnd = fmt->round_towards_zero? MPFR_RNDZ : MPFR_RNDN;
14182 int inexact, sg;
14183 mpfr_t m;
14184 tree result_lg;
14186 mpfr_init2 (m, prec);
14187 mpfr_from_real (m, ra, MPFR_RNDN);
14188 mpfr_clear_flags ();
14189 inexact = mpfr_lgamma (m, &sg, m, rnd);
14190 result_lg = do_mpfr_ckconv (m, type, inexact);
14191 mpfr_clear (m);
14192 if (result_lg)
14194 tree result_sg;
14196 /* Dereference the arg_sg pointer argument. */
14197 arg_sg = build_fold_indirect_ref (arg_sg);
14198 /* Assign the signgam value into *arg_sg. */
14199 result_sg = fold_build2 (MODIFY_EXPR,
14200 TREE_TYPE (arg_sg), arg_sg,
14201 build_int_cst (TREE_TYPE (arg_sg), sg));
14202 TREE_SIDE_EFFECTS (result_sg) = 1;
14203 /* Combine the signgam assignment with the lgamma result. */
14204 result = non_lvalue (fold_build2 (COMPOUND_EXPR, type,
14205 result_sg, result_lg));
14210 return result;
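/* A worked constant example: lgamma_r (2.0, &sg) folds to 0.0 with
   *sg set to 1, since gamma (2) = 1! = 1 and log (1) = 0. */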
14213 /* If arguments ARG0 and ARG1 are a COMPLEX_CST, call the two-argument
14214 mpc function FUNC on it and return the resulting value as a tree
14215 with type TYPE. The mpfr precision is set to the precision of
14216 TYPE. We assume that function FUNC returns zero if the result
14217 could be calculated exactly within the requested precision. If
14218 DO_NONFINITE is true, then fold expressions containing Inf or NaN
14219 in the arguments and/or results. */
14221 tree
14222 do_mpc_arg2 (tree arg0, tree arg1, tree type, int do_nonfinite,
14223 int (*func)(mpc_ptr, mpc_srcptr, mpc_srcptr, mpc_rnd_t))
14225 tree result = NULL_TREE;
14227 STRIP_NOPS (arg0);
14228 STRIP_NOPS (arg1);
14230 /* To proceed, MPFR must exactly represent the target floating point
14231 format, which only happens when the target base equals two. */
14232 if (TREE_CODE (arg0) == COMPLEX_CST && !TREE_OVERFLOW (arg0)
14233 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg0))) == REAL_TYPE
14234 && TREE_CODE (arg1) == COMPLEX_CST && !TREE_OVERFLOW (arg1)
14235 && TREE_CODE (TREE_TYPE (TREE_TYPE (arg1))) == REAL_TYPE
14236 && REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (TREE_TYPE (arg0))))->b == 2)
14238 const REAL_VALUE_TYPE *const re0 = TREE_REAL_CST_PTR (TREE_REALPART (arg0));
14239 const REAL_VALUE_TYPE *const im0 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg0));
14240 const REAL_VALUE_TYPE *const re1 = TREE_REAL_CST_PTR (TREE_REALPART (arg1));
14241 const REAL_VALUE_TYPE *const im1 = TREE_REAL_CST_PTR (TREE_IMAGPART (arg1));
14243 if (do_nonfinite
14244 || (real_isfinite (re0) && real_isfinite (im0)
14245 && real_isfinite (re1) && real_isfinite (im1)))
14247 const struct real_format *const fmt =
14248 REAL_MODE_FORMAT (TYPE_MODE (TREE_TYPE (type)));
14249 const int prec = fmt->p;
14250 const mpfr_rnd_t rnd = fmt->round_towards_zero
14251 ? MPFR_RNDZ : MPFR_RNDN;
14252 const mpc_rnd_t crnd = fmt->round_towards_zero ? MPC_RNDZZ : MPC_RNDNN;
14253 int inexact;
14254 mpc_t m0, m1;
14256 mpc_init2 (m0, prec);
14257 mpc_init2 (m1, prec);
14258 mpfr_from_real (mpc_realref (m0), re0, rnd);
14259 mpfr_from_real (mpc_imagref (m0), im0, rnd);
14260 mpfr_from_real (mpc_realref (m1), re1, rnd);
14261 mpfr_from_real (mpc_imagref (m1), im1, rnd);
14262 mpfr_clear_flags ();
14263 inexact = func (m0, m0, m1, crnd);
14264 result = do_mpc_ckconv (m0, type, inexact, do_nonfinite);
14265 mpc_clear (m0);
14266 mpc_clear (m1);
14270 return result;

/* A wrapper function for builtin folding that prevents warnings for
   "statement without effect" and the like, caused by removing the
   call node before the warning is generated.  */

tree
fold_call_stmt (gcall *stmt, bool ignore)
{
  tree ret = NULL_TREE;
  tree fndecl = gimple_call_fndecl (stmt);
  location_t loc = gimple_location (stmt);
  if (fndecl && fndecl_built_in_p (fndecl)
      && !gimple_call_va_arg_pack_p (stmt))
    {
      int nargs = gimple_call_num_args (stmt);
      tree *args = (nargs > 0
		    ? gimple_call_arg_ptr (stmt, 0)
		    : &error_mark_node);

      if (avoid_folding_inline_builtin (fndecl))
	return NULL_TREE;
      if (DECL_BUILT_IN_CLASS (fndecl) == BUILT_IN_MD)
	return targetm.fold_builtin (fndecl, nargs, args, ignore);
      else
	{
	  ret = fold_builtin_n (loc, NULL_TREE, fndecl, args, nargs, ignore);
	  if (ret)
	    {
	      /* Propagate location information from the original call to
		 the expansion of the builtin.  Otherwise things like
		 maybe_emit_chk_warning, which operate on the expansion
		 of a builtin, will use the wrong location information.  */
	      if (gimple_has_location (stmt))
		{
		  tree realret = ret;
		  if (TREE_CODE (ret) == NOP_EXPR)
		    realret = TREE_OPERAND (ret, 0);
		  if (CAN_HAVE_LOCATION_P (realret)
		      && !EXPR_HAS_LOCATION (realret))
		    SET_EXPR_LOCATION (realret, loc);
		  return realret;
		}
	      return ret;
	    }
	}
    }
  return NULL_TREE;
}
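
/* Illustrative only: a hypothetical GIMPLE-level user of
   fold_call_stmt.  With IGNORE set the call's value is unused, so a
   successful fold to a tree without side effects means the statement
   could simply be removed.  */
#if 0
static void
fold_dead_call_example (gimple_stmt_iterator *gsi)
{
  gcall *call = dyn_cast <gcall *> (gsi_stmt (*gsi));
  if (!call)
    return;
  tree folded = fold_call_stmt (call, /*ignore=*/true);
  if (folded && !TREE_SIDE_EFFECTS (folded))
    gsi_remove (gsi, /*remove_permanently=*/true);
}
#endif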

/* Look up the function in builtin_decl that corresponds to DECL
   and set ASMSPEC as its user assembler name.  DECL must be a
   function decl that declares a builtin.  */

void
set_builtin_user_assembler_name (tree decl, const char *asmspec)
{
  gcc_assert (fndecl_built_in_p (decl, BUILT_IN_NORMAL)
	      && asmspec != 0);

  tree builtin = builtin_decl_explicit (DECL_FUNCTION_CODE (decl));
  set_user_assembler_name (builtin, asmspec);

  if (DECL_FUNCTION_CODE (decl) == BUILT_IN_FFS
      && INT_TYPE_SIZE < BITS_PER_WORD)
    {
      scalar_int_mode mode = int_mode_for_size (INT_TYPE_SIZE, 0).require ();
      set_user_assembler_libfunc ("ffs", asmspec);
      set_optab_libfunc (ffs_optab, mode, "ffs");
    }
}
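
/* Illustrative only: the kind of user-level declaration that reaches
   this function.  Renaming ffs must also redirect the ffs optab's
   libfunc (handled specially above) so that expansions of
   __builtin_ffs which fall back to a library call use the new name.  */
#if 0
extern int ffs (int) __asm__ ("my_ffs");
#endif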

/* Return true if DECL is a builtin that expands to a constant or similarly
   simple code.  */
bool
is_simple_builtin (tree decl)
{
  if (decl && fndecl_built_in_p (decl, BUILT_IN_NORMAL))
    switch (DECL_FUNCTION_CODE (decl))
      {
	/* Builtins that expand to constants.  */
      case BUILT_IN_CONSTANT_P:
      case BUILT_IN_EXPECT:
      case BUILT_IN_OBJECT_SIZE:
      case BUILT_IN_UNREACHABLE:
	/* Simple register moves or loads from stack.  */
      case BUILT_IN_ASSUME_ALIGNED:
      case BUILT_IN_RETURN_ADDRESS:
      case BUILT_IN_EXTRACT_RETURN_ADDR:
      case BUILT_IN_FROB_RETURN_ADDR:
      case BUILT_IN_RETURN:
      case BUILT_IN_AGGREGATE_INCOMING_ADDRESS:
      case BUILT_IN_FRAME_ADDRESS:
      case BUILT_IN_VA_END:
      case BUILT_IN_STACK_SAVE:
      case BUILT_IN_STACK_RESTORE:
	/* Exception state returns or moves registers around.  */
      case BUILT_IN_EH_FILTER:
      case BUILT_IN_EH_POINTER:
      case BUILT_IN_EH_COPY_VALUES:
	return true;

      default:
	return false;
      }

  return false;
}

/* Return true if DECL is a builtin that is not expensive, i.e., it is
   most probably expanded inline into reasonably simple code.  This is a
   superset of is_simple_builtin.  */
bool
is_inexpensive_builtin (tree decl)
{
  if (!decl)
    return false;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_MD)
    return true;
  else if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      case BUILT_IN_ABS:
      CASE_BUILT_IN_ALLOCA:
      case BUILT_IN_BSWAP16:
      case BUILT_IN_BSWAP32:
      case BUILT_IN_BSWAP64:
      case BUILT_IN_BSWAP128:
      case BUILT_IN_CLZ:
      case BUILT_IN_CLZIMAX:
      case BUILT_IN_CLZL:
      case BUILT_IN_CLZLL:
      case BUILT_IN_CTZ:
      case BUILT_IN_CTZIMAX:
      case BUILT_IN_CTZL:
      case BUILT_IN_CTZLL:
      case BUILT_IN_FFS:
      case BUILT_IN_FFSIMAX:
      case BUILT_IN_FFSL:
      case BUILT_IN_FFSLL:
      case BUILT_IN_IMAXABS:
      case BUILT_IN_FINITE:
      case BUILT_IN_FINITEF:
      case BUILT_IN_FINITEL:
      case BUILT_IN_FINITED32:
      case BUILT_IN_FINITED64:
      case BUILT_IN_FINITED128:
      case BUILT_IN_FPCLASSIFY:
      case BUILT_IN_ISFINITE:
      case BUILT_IN_ISINF_SIGN:
      case BUILT_IN_ISINF:
      case BUILT_IN_ISINFF:
      case BUILT_IN_ISINFL:
      case BUILT_IN_ISINFD32:
      case BUILT_IN_ISINFD64:
      case BUILT_IN_ISINFD128:
      case BUILT_IN_ISNAN:
      case BUILT_IN_ISNANF:
      case BUILT_IN_ISNANL:
      case BUILT_IN_ISNAND32:
      case BUILT_IN_ISNAND64:
      case BUILT_IN_ISNAND128:
      case BUILT_IN_ISNORMAL:
      case BUILT_IN_ISGREATER:
      case BUILT_IN_ISGREATEREQUAL:
      case BUILT_IN_ISLESS:
      case BUILT_IN_ISLESSEQUAL:
      case BUILT_IN_ISLESSGREATER:
      case BUILT_IN_ISUNORDERED:
      case BUILT_IN_VA_ARG_PACK:
      case BUILT_IN_VA_ARG_PACK_LEN:
      case BUILT_IN_VA_COPY:
      case BUILT_IN_TRAP:
      case BUILT_IN_SAVEREGS:
      case BUILT_IN_POPCOUNTL:
      case BUILT_IN_POPCOUNTLL:
      case BUILT_IN_POPCOUNTIMAX:
      case BUILT_IN_POPCOUNT:
      case BUILT_IN_PARITYL:
      case BUILT_IN_PARITYLL:
      case BUILT_IN_PARITYIMAX:
      case BUILT_IN_PARITY:
      case BUILT_IN_LABS:
      case BUILT_IN_LLABS:
      case BUILT_IN_PREFETCH:
      case BUILT_IN_ACC_ON_DEVICE:
	return true;

      default:
	return is_simple_builtin (decl);
      }

  return false;
}
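
/* Illustrative only: a hypothetical heuristic consulting the two
   predicates above.  Because is_inexpensive_builtin falls back to
   is_simple_builtin in its default case, one call covers both
   categories.  */
#if 0
static bool
call_is_cheap_example (gcall *call)
{
  tree fndecl = gimple_call_fndecl (call);
  return fndecl && is_inexpensive_builtin (fndecl);
}
#endif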

/* Return true if T is a constant and the value cast to a target char
   can be represented by a host char.
   Store the cast char constant in *P if so.  */

bool
target_char_cst_p (tree t, char *p)
{
  if (!tree_fits_uhwi_p (t) || CHAR_TYPE_SIZE != HOST_BITS_PER_CHAR)
    return false;

  *p = (char) tree_to_uhwi (t);
  return true;
}
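
/* Illustrative only: typical use when folding string builtins whose
   character operand is passed as an int, e.g. checking whether the
   second argument of a strchr call is a known NUL (hypothetical
   helper).  */
#if 0
static bool
strchr_arg_is_nul_example (tree arg1)
{
  char c;
  /* Fails for non-constants; and when host and target chars differ
     in size, no folding should be attempted at all.  */
  if (!target_char_cst_p (arg1, &c))
    return false;
  return c == '\0';
}
#endif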

/* Return true if the builtin DECL is implemented in a standard library.
   Otherwise return false, which does not guarantee that DECL is not
   implemented in a library (so the list of handled builtins below may
   be incomplete).  */

bool
builtin_with_linkage_p (tree decl)
{
  if (DECL_BUILT_IN_CLASS (decl) == BUILT_IN_NORMAL)
    switch (DECL_FUNCTION_CODE (decl))
      {
      CASE_FLT_FN (BUILT_IN_ACOS):
      CASE_FLT_FN (BUILT_IN_ACOSH):
      CASE_FLT_FN (BUILT_IN_ASIN):
      CASE_FLT_FN (BUILT_IN_ASINH):
      CASE_FLT_FN (BUILT_IN_ATAN):
      CASE_FLT_FN (BUILT_IN_ATANH):
      CASE_FLT_FN (BUILT_IN_ATAN2):
      CASE_FLT_FN (BUILT_IN_CBRT):
      CASE_FLT_FN (BUILT_IN_CEIL):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_CEIL):
      CASE_FLT_FN (BUILT_IN_COPYSIGN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_COPYSIGN):
      CASE_FLT_FN (BUILT_IN_COS):
      CASE_FLT_FN (BUILT_IN_COSH):
      CASE_FLT_FN (BUILT_IN_ERF):
      CASE_FLT_FN (BUILT_IN_ERFC):
      CASE_FLT_FN (BUILT_IN_EXP):
      CASE_FLT_FN (BUILT_IN_EXP2):
      CASE_FLT_FN (BUILT_IN_EXPM1):
      CASE_FLT_FN (BUILT_IN_FABS):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FABS):
      CASE_FLT_FN (BUILT_IN_FDIM):
      CASE_FLT_FN (BUILT_IN_FLOOR):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FLOOR):
      CASE_FLT_FN (BUILT_IN_FMA):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMA):
      CASE_FLT_FN (BUILT_IN_FMAX):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMAX):
      CASE_FLT_FN (BUILT_IN_FMIN):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_FMIN):
      CASE_FLT_FN (BUILT_IN_FMOD):
      CASE_FLT_FN (BUILT_IN_FREXP):
      CASE_FLT_FN (BUILT_IN_HYPOT):
      CASE_FLT_FN (BUILT_IN_ILOGB):
      CASE_FLT_FN (BUILT_IN_LDEXP):
      CASE_FLT_FN (BUILT_IN_LGAMMA):
      CASE_FLT_FN (BUILT_IN_LLRINT):
      CASE_FLT_FN (BUILT_IN_LLROUND):
      CASE_FLT_FN (BUILT_IN_LOG):
      CASE_FLT_FN (BUILT_IN_LOG10):
      CASE_FLT_FN (BUILT_IN_LOG1P):
      CASE_FLT_FN (BUILT_IN_LOG2):
      CASE_FLT_FN (BUILT_IN_LOGB):
      CASE_FLT_FN (BUILT_IN_LRINT):
      CASE_FLT_FN (BUILT_IN_LROUND):
      CASE_FLT_FN (BUILT_IN_MODF):
      CASE_FLT_FN (BUILT_IN_NAN):
      CASE_FLT_FN (BUILT_IN_NEARBYINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_NEARBYINT):
      CASE_FLT_FN (BUILT_IN_NEXTAFTER):
      CASE_FLT_FN (BUILT_IN_NEXTTOWARD):
      CASE_FLT_FN (BUILT_IN_POW):
      CASE_FLT_FN (BUILT_IN_REMAINDER):
      CASE_FLT_FN (BUILT_IN_REMQUO):
      CASE_FLT_FN (BUILT_IN_RINT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_RINT):
      CASE_FLT_FN (BUILT_IN_ROUND):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_ROUND):
      CASE_FLT_FN (BUILT_IN_SCALBLN):
      CASE_FLT_FN (BUILT_IN_SCALBN):
      CASE_FLT_FN (BUILT_IN_SIN):
      CASE_FLT_FN (BUILT_IN_SINH):
      CASE_FLT_FN (BUILT_IN_SINCOS):
      CASE_FLT_FN (BUILT_IN_SQRT):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT):
      CASE_FLT_FN (BUILT_IN_TAN):
      CASE_FLT_FN (BUILT_IN_TANH):
      CASE_FLT_FN (BUILT_IN_TGAMMA):
      CASE_FLT_FN (BUILT_IN_TRUNC):
      CASE_FLT_FN_FLOATN_NX (BUILT_IN_TRUNC):
	return true;

      default:
	break;
      }
  return false;
}
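
/* Illustrative only: what the CASE_FLT_FN macros used above expand to.
   CASE_FLT_FN (BUILT_IN_SQRT) stands for the three classic variants

     case BUILT_IN_SQRT: case BUILT_IN_SQRTF: case BUILT_IN_SQRTL:

   and CASE_FLT_FN_FLOATN_NX (BUILT_IN_SQRT) adds the _FloatN and
   _FloatNx variants (BUILT_IN_SQRTF16, BUILT_IN_SQRTF32, ...), so each
   entry in the list covers the whole family of a math function.  */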

/* Return true if OFFRNG is bounded to a subrange of offset values
   valid for the largest possible object.  */

bool
access_ref::offset_bounded () const
{
  tree min = TYPE_MIN_VALUE (ptrdiff_type_node);
  tree max = TYPE_MAX_VALUE (ptrdiff_type_node);
  return wi::to_offset (min) <= offrng[0] && offrng[1] <= wi::to_offset (max);
}
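
/* Illustrative only: offsets are tracked in the wider offset_int
   representation, so on an LP64 target, where ptrdiff_type_node is a
   64-bit signed type, the test above amounts to checking that
   [offrng[0], offrng[1]] lies within [-2^63, 2^63 - 1].  */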

/* Return the fnspec string describing the known side effects of the
   builtin CALLEE, or the empty spec if nothing is known.  See
   tree-ssa-structalias.c:find_func_aliases for the list of builtins
   we might need to handle here.  */

attr_fnspec
builtin_fnspec (tree callee)
{
  built_in_function code = DECL_FUNCTION_CODE (callee);

  switch (code)
    {
      /* All the following functions read memory pointed to by
	 their second argument and write memory pointed to by first
	 argument.
	 strcat/strncat additionally read memory pointed to by the first
	 argument.  */
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
      return "1cW 1 ";
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
      return "1cW 13";
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
      return "1cO 1 ";
    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      return ".cO 1 ";
    case BUILT_IN_STRNCPY:
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_TM_MEMCPY:
    case BUILT_IN_TM_MEMMOVE:
    case BUILT_IN_STRNCPY_CHK:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE_CHK:
      return "1cO313";
    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      return ".cO313";
    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      return ".cO313";
    case BUILT_IN_BCOPY:
      return ".c23O3";
    case BUILT_IN_BZERO:
      return ".cO2";
    case BUILT_IN_MEMCMP:
    case BUILT_IN_MEMCMP_EQ:
    case BUILT_IN_BCMP:
    case BUILT_IN_STRNCMP:
    case BUILT_IN_STRNCMP_EQ:
    case BUILT_IN_STRNCASECMP:
      return ".cR3R3";

      /* The following functions read memory pointed to by their
	 first argument.  */
    CASE_BUILT_IN_TM_LOAD (1):
    CASE_BUILT_IN_TM_LOAD (2):
    CASE_BUILT_IN_TM_LOAD (4):
    CASE_BUILT_IN_TM_LOAD (8):
    CASE_BUILT_IN_TM_LOAD (FLOAT):
    CASE_BUILT_IN_TM_LOAD (DOUBLE):
    CASE_BUILT_IN_TM_LOAD (LDOUBLE):
    CASE_BUILT_IN_TM_LOAD (M64):
    CASE_BUILT_IN_TM_LOAD (M128):
    CASE_BUILT_IN_TM_LOAD (M256):
    case BUILT_IN_TM_LOG:
    case BUILT_IN_TM_LOG_1:
    case BUILT_IN_TM_LOG_2:
    case BUILT_IN_TM_LOG_4:
    case BUILT_IN_TM_LOG_8:
    case BUILT_IN_TM_LOG_FLOAT:
    case BUILT_IN_TM_LOG_DOUBLE:
    case BUILT_IN_TM_LOG_LDOUBLE:
    case BUILT_IN_TM_LOG_M64:
    case BUILT_IN_TM_LOG_M128:
    case BUILT_IN_TM_LOG_M256:
      return ".cR ";

    case BUILT_IN_INDEX:
    case BUILT_IN_RINDEX:
    case BUILT_IN_STRCHR:
    case BUILT_IN_STRLEN:
    case BUILT_IN_STRRCHR:
      return ".cR ";
    case BUILT_IN_STRNLEN:
      return ".cR2";

      /* These read memory pointed to by the first argument.
	 Allocating memory does not have any side effects apart from
	 being the definition point for the pointer.
	 Unix98 specifies that errno is set on allocation failure.  */
    case BUILT_IN_STRDUP:
      return "mCR ";
    case BUILT_IN_STRNDUP:
      return "mCR2";
      /* Allocating memory does not have any side effects apart from
	 being the definition point for the pointer.  */
    case BUILT_IN_MALLOC:
    case BUILT_IN_ALIGNED_ALLOC:
    case BUILT_IN_CALLOC:
    case BUILT_IN_GOMP_ALLOC:
      return "mC";
    CASE_BUILT_IN_ALLOCA:
      return "mc";
      /* These read memory pointed to by the first argument with size
	 given by the third argument.  */
    case BUILT_IN_MEMCHR:
      return ".cR3";
      /* These read memory pointed to by the first and second arguments.  */
    case BUILT_IN_STRSTR:
    case BUILT_IN_STRPBRK:
    case BUILT_IN_STRCASECMP:
    case BUILT_IN_STRCSPN:
    case BUILT_IN_STRSPN:
    case BUILT_IN_STRCMP:
    case BUILT_IN_STRCMP_EQ:
      return ".cR R ";
      /* Freeing memory kills the pointed-to memory.  More importantly,
	 the call has to serve as a barrier for moving loads and stores
	 across it.  */
    case BUILT_IN_STACK_RESTORE:
    case BUILT_IN_FREE:
    case BUILT_IN_GOMP_FREE:
      return ".co ";
    case BUILT_IN_VA_END:
      return ".cO ";
      /* Realloc serves as both an allocation point and a deallocation
	 point.  */
    case BUILT_IN_REALLOC:
      return ".Cw ";
    case BUILT_IN_GAMMA_R:
    case BUILT_IN_GAMMAF_R:
    case BUILT_IN_GAMMAL_R:
    case BUILT_IN_LGAMMA_R:
    case BUILT_IN_LGAMMAF_R:
    case BUILT_IN_LGAMMAL_R:
      return ".C. Ot";
    case BUILT_IN_FREXP:
    case BUILT_IN_FREXPF:
    case BUILT_IN_FREXPL:
    case BUILT_IN_MODF:
    case BUILT_IN_MODFF:
    case BUILT_IN_MODFL:
      return ".c. Ot";
    case BUILT_IN_REMQUO:
    case BUILT_IN_REMQUOF:
    case BUILT_IN_REMQUOL:
      return ".c. . Ot";
    case BUILT_IN_SINCOS:
    case BUILT_IN_SINCOSF:
    case BUILT_IN_SINCOSL:
      return ".c. OtOt";
    case BUILT_IN_MEMSET:
    case BUILT_IN_MEMSET_CHK:
    case BUILT_IN_TM_MEMSET:
      return "1cO3";
    CASE_BUILT_IN_TM_STORE (1):
    CASE_BUILT_IN_TM_STORE (2):
    CASE_BUILT_IN_TM_STORE (4):
    CASE_BUILT_IN_TM_STORE (8):
    CASE_BUILT_IN_TM_STORE (FLOAT):
    CASE_BUILT_IN_TM_STORE (DOUBLE):
    CASE_BUILT_IN_TM_STORE (LDOUBLE):
    CASE_BUILT_IN_TM_STORE (M64):
    CASE_BUILT_IN_TM_STORE (M128):
    CASE_BUILT_IN_TM_STORE (M256):
      return ".cO ";
    case BUILT_IN_STACK_SAVE:
      return ".c";
    case BUILT_IN_ASSUME_ALIGNED:
      return "1cX ";
      /* But posix_memalign stores a pointer into the memory pointed to
	 by its first argument.  */
    case BUILT_IN_POSIX_MEMALIGN:
      return ".cOt";

    default:
      return "";
    }
}
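
/* Illustrative only: a rough decoding of one spec string, following
   the encoding documented in attr-fnspec.h.  For memcpy the string
   "1cO313" says that the return value is the first argument, that the
   function has no effects beyond those described ('c'), that the
   first argument is written but not read ('O') with the access size
   given by the third argument ('3'), and that the second argument is
   read and copied into the first ('1'), again with size given by the
   third argument.  The hypothetical check below reads the raw string
   directly rather than going through the attr_fnspec accessors.  */
#if 0
static bool
memcpy_dest_only_written_p (void)
{
  const char *spec = "1cO313";	/* builtin_fnspec for memcpy.  */
  /* Characters 2 and 3 describe the first argument.  */
  return spec[2] == 'O' && spec[3] == '3';
}
#endif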