Revise -mdisable-fpregs option and add new -msoft-mult option
[official-gcc.git] / gcc / pointer-query.cc
blob910f452868ef786a01427e6e77cf6113e6b85975
1 /* Definitions of the pointer_query and related classes.
3 Copyright (C) 2020-2021 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "stringpool.h"
28 #include "tree-vrp.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "tree-ssa-strlen.h"
33 #include "langhooks.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "gimple-fold.h"
37 #include "gimple-ssa.h"
38 #include "intl.h"
39 #include "attr-fnspec.h"
40 #include "gimple-range.h"
41 #include "pointer-query.h"
42 #include "tree-pretty-print.h"
43 #include "tree-ssanames.h"
44 #include "target.h"
46 static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
47 pointer_query *);
49 /* Wrapper around the wide_int overload of get_range that accepts
50 offset_int instead. For middle end expressions returns the same
51 result. For a subset of nonconstant expressions emitted by the front
52 end determines a more precise range than would be possible otherwise. */
54 static bool
55 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
57 offset_int add = 0;
58 if (TREE_CODE (x) == PLUS_EXPR)
60 /* Handle constant offsets in pointer addition expressions seen
61 n the front end IL. */
62 tree op = TREE_OPERAND (x, 1);
63 if (TREE_CODE (op) == INTEGER_CST)
65 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
66 add = wi::to_offset (op);
67 x = TREE_OPERAND (x, 0);
71 if (TREE_CODE (x) == NOP_EXPR)
72 /* Also handle conversions to sizetype seen in the front end IL. */
73 x = TREE_OPERAND (x, 0);
75 tree type = TREE_TYPE (x);
76 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
77 return false;
79 if (TREE_CODE (x) != INTEGER_CST
80 && TREE_CODE (x) != SSA_NAME)
82 if (TYPE_UNSIGNED (type)
83 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
84 type = signed_type_for (type);
86 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
87 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
88 return x;
91 wide_int wr[2];
92 if (!get_range (x, stmt, wr, rvals))
93 return false;
95 signop sgn = SIGNED;
96 /* Only convert signed integers or unsigned sizetype to a signed
97 offset and avoid converting large positive values in narrower
98 types to negative offsets. */
99 if (TYPE_UNSIGNED (type)
100 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
101 sgn = UNSIGNED;
103 r[0] = offset_int::from (wr[0], sgn);
104 r[1] = offset_int::from (wr[1], sgn);
105 return true;
108 /* Return the argument that the call STMT to a built-in function returns
109 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
110 from the argument reflected in the value returned by the built-in if it
111 can be determined, otherwise to 0 and HWI_M1U respectively. Set
112 *PAST_END for functions like mempcpy that might return a past the end
113 pointer (most functions return a dereferenceable pointer to an existing
114 element of an array). */
116 static tree
117 gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
118 range_query *rvals)
120 /* Clear and set below for the rare function(s) that might return
121 a past-the-end pointer. */
122 *past_end = false;
125 /* Check for attribute fn spec to see if the function returns one
126 of its arguments. */
127 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
128 unsigned int argno;
129 if (fnspec.returns_arg (&argno))
131 /* Functions return the first argument (not a range). */
132 offrng[0] = offrng[1] = 0;
133 return gimple_call_arg (stmt, argno);
/* A returned pointer must be based on some argument; give up when
   there is none.  */
137 if (gimple_call_num_args (stmt) < 1)
138 return NULL_TREE;
140 tree fn = gimple_call_fndecl (stmt);
141 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
143 /* See if this is a call to placement new. */
144 if (!fn
145 || !DECL_IS_OPERATOR_NEW_P (fn)
146 || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
147 return NULL_TREE;
149 /* Check the mangling, keeping in mind that operator new takes
150 a size_t which could be unsigned int or unsigned long. */
151 tree fname = DECL_ASSEMBLER_NAME (fn);
152 if (!id_equal (fname, "_ZnwjPv") // ordinary form
153 && !id_equal (fname, "_ZnwmPv") // ordinary form
154 && !id_equal (fname, "_ZnajPv") // array form
155 && !id_equal (fname, "_ZnamPv")) // array form
156 return NULL_TREE;
158 if (gimple_call_num_args (stmt) != 2)
159 return NULL_TREE;
/* Placement new returns its second (placement) argument.  */
161 /* Allocation functions return a pointer to the beginning. */
162 offrng[0] = offrng[1] = 0;
163 return gimple_call_arg (stmt, 1);
/* STMT is a call to a normal built-in; handle the string/memory
   functions that return (an offset into) their first argument.  */
166 switch (DECL_FUNCTION_CODE (fn))
168 case BUILT_IN_MEMCPY:
169 case BUILT_IN_MEMCPY_CHK:
170 case BUILT_IN_MEMMOVE:
171 case BUILT_IN_MEMMOVE_CHK:
172 case BUILT_IN_MEMSET:
173 case BUILT_IN_STRCAT:
174 case BUILT_IN_STRCAT_CHK:
175 case BUILT_IN_STRCPY:
176 case BUILT_IN_STRCPY_CHK:
177 case BUILT_IN_STRNCAT:
178 case BUILT_IN_STRNCAT_CHK:
179 case BUILT_IN_STRNCPY:
180 case BUILT_IN_STRNCPY_CHK:
181 /* Functions return the first argument (not a range). */
182 offrng[0] = offrng[1] = 0;
183 return gimple_call_arg (stmt, 0);
185 case BUILT_IN_MEMPCPY:
186 case BUILT_IN_MEMPCPY_CHK:
188 /* The returned pointer is in a range constrained by the smaller
189 of the upper bound of the size argument and the source object
190 size. */
191 offrng[0] = 0;
192 offrng[1] = HOST_WIDE_INT_M1U;
193 tree off = gimple_call_arg (stmt, 2);
194 bool off_valid = get_offset_range (off, stmt, offrng, rvals);
195 if (!off_valid || offrng[0] != offrng[1])
197 /* If the offset is either indeterminate or in some range,
198 try to constrain its upper bound to at most the size
199 of the source object. */
200 access_ref aref;
201 tree src = gimple_call_arg (stmt, 1);
202 if (compute_objsize (src, 1, &aref, rvals)
203 && aref.sizrng[1] < offrng[1])
204 offrng[1] = aref.sizrng[1];
207 /* Mempcpy may return a past-the-end pointer. */
208 *past_end = true;
209 return gimple_call_arg (stmt, 0);
212 case BUILT_IN_MEMCHR:
214 tree off = gimple_call_arg (stmt, 2);
215 if (get_offset_range (off, stmt, offrng, rvals))
/* memchr never returns a pointer past the last byte searched,
   hence the upper bound is one less than the size.  */
216 offrng[1] -= 1;
217 else
218 offrng[1] = HOST_WIDE_INT_M1U;
220 offrng[0] = 0;
221 return gimple_call_arg (stmt, 0);
224 case BUILT_IN_STRCHR:
225 case BUILT_IN_STRRCHR:
226 case BUILT_IN_STRSTR:
/* The result may point anywhere into the first argument, with no
   usable bound on the offset.  */
227 offrng[0] = 0;
228 offrng[1] = HOST_WIDE_INT_M1U;
229 return gimple_call_arg (stmt, 0);
231 case BUILT_IN_STPCPY:
232 case BUILT_IN_STPCPY_CHK:
234 access_ref aref;
235 tree src = gimple_call_arg (stmt, 1);
/* stpcpy returns a pointer to the terminating nul, so the offset
   is bounded by the size of the source minus one.  */
236 if (compute_objsize (src, 1, &aref, rvals))
237 offrng[1] = aref.sizrng[1] - 1;
238 else
239 offrng[1] = HOST_WIDE_INT_M1U;
241 offrng[0] = 0;
242 return gimple_call_arg (stmt, 0);
245 case BUILT_IN_STPNCPY:
246 case BUILT_IN_STPNCPY_CHK:
248 /* The returned pointer is in a range between the first argument
249 and it plus the smaller of the upper bound of the size argument
250 and the source object size. */
251 offrng[1] = HOST_WIDE_INT_M1U;
252 tree off = gimple_call_arg (stmt, 2);
253 if (!get_offset_range (off, stmt, offrng, rvals)
254 || offrng[0] != offrng[1])
256 /* If the offset is either indeterminate or in some range,
257 try to constrain its upper bound to at most the size
258 of the source object. */
259 access_ref aref;
260 tree src = gimple_call_arg (stmt, 1);
261 if (compute_objsize (src, 1, &aref, rvals)
262 && aref.sizrng[1] < offrng[1])
263 offrng[1] = aref.sizrng[1];
266 /* When the source is the empty string the returned pointer is
267 a copy of the argument. Otherwise stpcpy can also return
268 a past-the-end pointer. */
269 offrng[0] = 0;
270 *past_end = true;
271 return gimple_call_arg (stmt, 0);
274 default:
275 break;
/* Not a built-in known to return one of its arguments.  */
278 return NULL_TREE;
281 /* Return true when EXP's range can be determined and set RANGE[] to it
282 after adjusting it if necessary to make EXP represent a valid size
283 of object, or a valid size argument to an allocation function declared
284 with attribute alloc_size (whose argument may be signed), or to a string
285 manipulation function like memset.
286 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
287 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
288 a (nearly) invalid argument to allocation functions like malloc but it
289 is a valid argument to functions like memset.
290 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
291 in a multi-range, otherwise to the smallest valid subrange. */
293 bool
294 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
295 int flags /* = 0 */)
297 if (!exp)
298 return false;
/* Fast path: a constant that fits in unsigned HOST_WIDE_INT is its
   own (degenerate) range.  */
300 if (tree_fits_uhwi_p (exp))
302 /* EXP is a constant. */
303 range[0] = range[1] = exp;
304 return true;
307 tree exptype = TREE_TYPE (exp);
308 bool integral = INTEGRAL_TYPE_P (exptype);
310 wide_int min, max;
311 enum value_range_kind range_type;
313 if (!query)
314 query = get_range_query (cfun);
316 if (integral)
318 value_range vr;
320 query->range_of_expr (vr, exp, stmt);
/* Treat an undefined range as varying so the fallbacks below apply.  */
322 if (vr.undefined_p ())
323 vr.set_varying (TREE_TYPE (exp));
324 range_type = vr.kind ();
325 min = wi::to_wide (vr.min ());
326 max = wi::to_wide (vr.max ());
328 else
329 range_type = VR_VARYING;
331 if (range_type == VR_VARYING)
333 if (integral)
335 /* Use the full range of the type of the expression when
336 no value range information is available. */
337 range[0] = TYPE_MIN_VALUE (exptype);
338 range[1] = TYPE_MAX_VALUE (exptype);
339 return true;
/* Non-integral EXP with no range info: fail.  */
342 range[0] = NULL_TREE;
343 range[1] = NULL_TREE;
344 return false;
347 unsigned expprec = TYPE_PRECISION (exptype);
349 bool signed_p = !TYPE_UNSIGNED (exptype);
/* Convert an anti-range (the set of values OUTSIDE [MIN, MAX]) into
   an ordinary range of valid sizes.  */
351 if (range_type == VR_ANTI_RANGE)
353 if (signed_p)
355 if (wi::les_p (max, 0))
357 /* EXP is not in a strictly negative range. That means
358 it must be in some (not necessarily strictly) positive
359 range which includes zero. Since in signed to unsigned
360 conversions negative values end up converted to large
361 positive values, and otherwise they are not valid sizes,
362 the resulting range is in both cases [0, TYPE_MAX]. */
363 min = wi::zero (expprec);
364 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
366 else if (wi::les_p (min - 1, 0))
368 /* EXP is not in a negative-positive range. That means EXP
369 is either negative, or greater than max. Since negative
370 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
371 min = max + 1;
372 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
374 else
376 max = min - 1;
377 min = wi::zero (expprec);
380 else
382 wide_int maxsize = wi::to_wide (max_object_size ());
383 min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
384 max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
385 if (wi::eq_p (0, min - 1))
387 /* EXP is unsigned and not in the range [1, MAX]. That means
388 it's either zero or greater than MAX. Even though 0 would
389 normally be detected by -Walloc-zero, unless ALLOW_ZERO
390 is set, set the range to [MAX, TYPE_MAX] so that when MAX
391 is greater than the limit the whole range is diagnosed. */
392 wide_int maxsize = wi::to_wide (max_object_size ());
393 if (flags & SR_ALLOW_ZERO)
395 if (wi::leu_p (maxsize, max + 1)
396 || !(flags & SR_USE_LARGEST))
397 min = max = wi::zero (expprec);
398 else
400 min = max + 1;
401 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
404 else
406 min = max + 1;
407 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
410 else if ((flags & SR_USE_LARGEST)
411 && wi::ltu_p (max + 1, maxsize))
413 /* When USE_LARGEST is set and the larger of the two subranges
414 is a valid size, use it... */
415 min = max + 1;
416 max = maxsize;
418 else
420 /* ...otherwise use the smaller subrange. */
421 max = min - 1;
422 min = wi::zero (expprec);
/* Success: hand back the (possibly adjusted) bounds as trees.  */
427 range[0] = wide_int_to_tree (exptype, min);
428 range[1] = wide_int_to_tree (exptype, max);
430 return true;
433 bool
434 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
436 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
439 /* If STMT is a call to an allocation function, returns the constant
440 maximum size of the object allocated by the call represented as
441 sizetype. If nonnull, sets RNG1[] to the range of the size.
442 When nonnull, uses RVALS for range information, otherwise gets global
443 range info.
444 Returns null when STMT is not a call to a valid allocation function. */
446 tree
447 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
448 range_query * /* = NULL */)
450 if (!stmt || !is_gimple_call (stmt))
451 return NULL_TREE;
/* Prefer the declared callee's type; for indirect calls fall back on
   the call's own function type.  */
453 tree allocfntype;
454 if (tree fndecl = gimple_call_fndecl (stmt))
455 allocfntype = TREE_TYPE (fndecl);
456 else
457 allocfntype = gimple_call_fntype (stmt);
459 if (!allocfntype)
460 return NULL_TREE;
/* ARGIDX1/ARGIDX2 are the zero-based indices of the size and (optional)
   count arguments; UINT_MAX means "not set".  */
462 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
463 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
464 if (!at)
466 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
467 return NULL_TREE;
/* __builtin_alloca_with_align passes the size as argument 0.  */
469 argidx1 = 0;
472 unsigned nargs = gimple_call_num_args (stmt);
474 if (argidx1 == UINT_MAX)
476 tree atval = TREE_VALUE (at);
477 if (!atval)
478 return NULL_TREE;
/* attribute alloc_size argument positions are 1-based.  */
480 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
481 if (nargs <= argidx1)
482 return NULL_TREE;
484 atval = TREE_CHAIN (atval);
485 if (atval)
487 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
488 if (nargs <= argidx2)
489 return NULL_TREE;
493 tree size = gimple_call_arg (stmt, argidx1);
495 wide_int rng1_buf[2];
496 /* If RNG1 is not set, use the buffer. */
497 if (!rng1)
498 rng1 = rng1_buf;
500 /* Use maximum precision to avoid overflow below. */
501 const int prec = ADDR_MAX_PRECISION;
504 tree r[2];
505 /* Determine the largest valid range size, including zero. */
506 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
507 return NULL_TREE;
508 rng1[0] = wi::to_wide (r[0], prec);
509 rng1[1] = wi::to_wide (r[1], prec);
/* With a single constant size argument there is nothing to multiply;
   return it directly.  */
512 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
513 return fold_convert (sizetype, size);
515 /* To handle ranges do the math in wide_int and return the product
516 of the upper bounds as a constant. Ignore anti-ranges. */
517 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
518 wide_int rng2[2];
520 tree r[2];
521 /* As above, use the full non-negative range on failure. */
522 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
523 return NULL_TREE;
524 rng2[0] = wi::to_wide (r[0], prec);
525 rng2[1] = wi::to_wide (r[1], prec);
528 /* Compute products of both bounds for the caller but return the lesser
529 of SIZE_MAX and the product of the upper bounds as a constant. */
530 rng1[0] = rng1[0] * rng2[0];
531 rng1[1] = rng1[1] * rng2[1];
/* Cap the product at SIZE_MAX; PREC is wide enough that the
   multiplications above cannot wrap.  */
533 const tree size_max = TYPE_MAX_VALUE (sizetype);
534 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
536 rng1[1] = wi::to_wide (size_max, prec);
537 return size_max;
540 return wide_int_to_tree (sizetype, rng1[1]);
543 /* For an access to an object referenced to by the function parameter PTR
544 of pointer type, set RNG[] to the range of sizes of the object
545 obtained from the attribute access specification for the current function.
546 Set STATIC_ARRAY if the array parameter has been declared [static].
547 Return the function parameter on success and null otherwise. */
549 tree
550 gimple_parm_array_size (tree ptr, wide_int rng[2],
551 bool *static_array /* = NULL */)
553 /* For a function argument try to determine the byte size of the array
554 from the current function declaration (e.g., attribute access or
555 related). */
556 tree var = SSA_NAME_VAR (ptr);
557 if (TREE_CODE (var) != PARM_DECL)
558 return NULL_TREE;
560 const unsigned prec = TYPE_PRECISION (sizetype);
/* Look up the attribute access specification for the parameter.  */
562 rdwr_map rdwr_idx;
563 attr_access *access = get_parm_access (rdwr_idx, var);
564 if (!access)
565 return NULL_TREE;
/* Give up when the bound is given by another (size) argument rather
   than a constant encoded in the attribute.  */
567 if (access->sizarg != UINT_MAX)
569 /* TODO: Try to extract the range from the argument based on
570 those of subsequent assertions or based on known calls to
571 the current function. */
572 return NULL_TREE;
575 if (!access->minsize)
576 return NULL_TREE;
578 /* Only consider ordinary array bound at level 2 (or above if it's
579 ever added). */
580 if (warn_array_parameter < 2 && !access->static_p)
581 return NULL_TREE;
583 if (static_array)
584 *static_array = access->static_p;
/* The size range is [0, bound * sizeof (element)].  */
586 rng[0] = wi::zero (prec);
587 rng[1] = wi::uhwi (access->minsize, prec);
588 /* Multiply the array bound encoded in the attribute by the size
589 of what the pointer argument to which it decays points to. */
590 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
591 tree size = TYPE_SIZE_UNIT (eltype);
592 if (!size || TREE_CODE (size) != INTEGER_CST)
593 return NULL_TREE;
595 rng[1] *= wi::to_wide (size, prec);
596 return var;
/* Construct an access_ref, optionally deriving the access bounds from
   the size expression BOUND; MINACCESS set means at least one byte is
   accessed.  */
599 access_ref::access_ref (tree bound /* = NULL_TREE */,
600 bool minaccess /* = false */)
601 : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
602 base0 (true), parmarray ()
604 /* Set to valid. */
605 offrng[0] = offrng[1] = 0;
606 offmax[0] = offmax[1] = 0;
607 /* Invalidate. */
608 sizrng[0] = sizrng[1] = -1;
610 /* Set the default bounds of the access and adjust below. */
611 bndrng[0] = minaccess ? 1 : 0;
612 bndrng[1] = HOST_WIDE_INT_M1U;
614 /* When BOUND is nonnull and a range can be extracted from it,
615 set the bounds of the access to reflect both it and MINACCESS.
616 BNDRNG[0] is the size of the minimum access. */
617 tree rng[2];
618 if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
620 bndrng[0] = wi::to_offset (rng[0]);
621 bndrng[1] = wi::to_offset (rng[1]);
/* Collapse the lower bound to 1 when MINACCESS and the bound is
   positive, and to zero otherwise.  */
622 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
626 /* Return the PHI node REF refers to or null if it doesn't. */
628 gphi *
629 access_ref::phi () const
631 if (!ref || TREE_CODE (ref) != SSA_NAME)
632 return NULL;
634 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
635 if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
636 return NULL;
638 return as_a <gphi *> (def_stmt);
641 /* Determine and return the largest object to which *THIS refers. If
642 *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
643 of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
644 argument ARG. */
646 tree
647 access_ref::get_ref (vec<access_ref> *all_refs,
648 access_ref *pref /* = NULL */,
649 int ostype /* = 1 */,
650 ssa_name_limit_t *psnlim /* = NULL */,
651 pointer_query *qry /* = NULL */) const
/* When REF isn't a PHI there is nothing to merge; return it as is.  */
653 gphi *phi_stmt = this->phi ();
654 if (!phi_stmt)
655 return ref;
657 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
658 cause unbounded recursion. */
659 ssa_name_limit_t snlim_buf;
660 if (!psnlim)
661 psnlim = &snlim_buf;
/* Fail on a PHI already being visited (a cycle in the SSA graph).  */
663 if (!psnlim->visit_phi (ref))
664 return NULL_TREE;
666 pointer_query empty_qry;
667 if (!qry)
668 qry = &empty_qry;
670 /* The conservative result of the PHI reflecting the offset and size
671 of the largest PHI argument, regardless of whether or not they all
672 refer to the same object. */
673 access_ref phi_ref;
674 if (pref)
676 /* The identity of the object has not been determined yet but
677 PREF->REF is set by the caller to the PHI for convenience.
678 The size is negative/invalid and the offset is zero (it's
679 updated only after the identity of the object has been
680 established). */
681 gcc_assert (pref->sizrng[0] < 0);
682 gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);
684 phi_ref = *pref;
687 /* Set if any argument is a function array (or VLA) parameter not
688 declared [static]. */
689 bool parmarray = false;
690 /* The size of the smallest object referenced by the PHI arguments. */
691 offset_int minsize = 0;
692 const offset_int maxobjsize = wi::to_offset (max_object_size ());
694 const unsigned nargs = gimple_phi_num_args (phi_stmt);
695 for (unsigned i = 0; i < nargs; ++i)
697 access_ref phi_arg_ref;
698 tree arg = gimple_phi_arg_def (phi_stmt, i);
699 if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
700 || phi_arg_ref.sizrng[0] < 0)
701 /* A PHI with all null pointer arguments. */
702 return NULL_TREE;
/* Cache the result for the argument so later queries reuse it.  */
704 if (TREE_CODE (arg) == SSA_NAME)
705 qry->put_ref (arg, phi_arg_ref);
707 if (all_refs)
708 all_refs->safe_push (phi_arg_ref);
710 parmarray |= phi_arg_ref.parmarray;
/* NULLP is set for a null argument only when the PHI has at least one
   other argument (i != 0, or more arguments follow).  */
712 const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);
714 if (phi_ref.sizrng[0] < 0)
716 /* If PHI_REF doesn't contain a meaningful result yet set it
717 to the result for the first argument. */
718 if (!nullp)
719 phi_ref = phi_arg_ref;
721 /* Set if the current argument refers to one or more objects of
722 known size (or range of sizes), as opposed to referring to
723 one or more unknown object(s). */
724 const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
725 || phi_arg_ref.sizrng[1] != maxobjsize);
726 if (arg_known_size)
727 minsize = phi_arg_ref.sizrng[0];
729 continue;
732 const bool phi_known_size = (phi_ref.sizrng[0] != 0
733 || phi_ref.sizrng[1] != maxobjsize);
735 if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
736 minsize = phi_arg_ref.sizrng[0];
738 /* Disregard null pointers in PHIs with two or more arguments.
739 TODO: Handle this better! */
740 if (nullp)
741 continue;
743 /* Determine the amount of remaining space in the argument. */
744 offset_int argrem[2];
745 argrem[1] = phi_arg_ref.size_remaining (argrem);
747 /* Determine the amount of remaining space computed so far and
748 if the remaining space in the argument is more use it instead. */
749 offset_int phirem[2];
750 phirem[1] = phi_ref.size_remaining (phirem);
752 /* Reset the PHI's BASE0 flag if any of the nonnull arguments
753 refers to an object at an unknown offset. */
754 if (!phi_arg_ref.base0)
755 phi_ref.base0 = false;
757 if (phirem[1] < argrem[1]
758 || (phirem[1] == argrem[1]
759 && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
760 /* Use the argument with the most space remaining as the result,
761 or the larger one if the space is equal. */
762 phi_ref = phi_arg_ref;
765 /* Replace the lower bound of the largest argument with the size
766 of the smallest argument, and set PARMARRAY if any argument
767 was one. */
768 phi_ref.sizrng[0] = minsize;
769 phi_ref.parmarray = parmarray;
771 if (phi_ref.sizrng[0] < 0)
773 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
774 (perhaps because they have all been already visited by prior
775 recursive calls). */
776 psnlim->leave_phi (ref);
777 return NULL_TREE;
780 /* Avoid changing *THIS. */
781 if (pref && pref != this)
782 *pref = phi_ref;
784 psnlim->leave_phi (ref);
786 return phi_ref.ref;
789 /* Return the maximum amount of space remaining and if non-null, set
790 argument to the minimum. */
792 offset_int
793 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
795 offset_int minbuf;
796 if (!pmin)
797 pmin = &minbuf;
799 if (sizrng[0] < 0)
801 /* If the identity of the object hasn't been determined return
802 the maximum size range. */
803 *pmin = 0;
804 return wi::to_offset (max_object_size ());
807 /* add_offset() ensures the offset range isn't inverted. */
808 gcc_checking_assert (offrng[0] <= offrng[1]);
810 if (base0)
812 /* The offset into referenced object is zero-based (i.e., it's
813 not referenced by a pointer into middle of some unknown object). */
814 if (offrng[0] < 0 && offrng[1] < 0)
816 /* If the offset is negative the remaining size is zero. */
817 *pmin = 0;
818 return 0;
821 if (sizrng[1] <= offrng[0])
823 /* If the starting offset is greater than or equal to the upper
824 bound on the size of the object, the space remaining is zero.
825 As a special case, if it's equal, set *PMIN to -1 to let
826 the caller know the offset is valid and just past the end. */
827 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
828 return 0;
831 /* Otherwise return the size minus the lower bound of the offset. */
832 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
834 *pmin = sizrng[0] - or0;
835 return sizrng[1] - or0;
838 /* The offset to the referenced object isn't zero-based (i.e., it may
839 refer to a byte other than the first. The size of such an object
840 is constrained only by the size of the address space (the result
841 of max_object_size()). */
842 if (sizrng[1] <= offrng[0])
844 *pmin = 0;
845 return 0;
848 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
850 *pmin = sizrng[0] - or0;
851 return sizrng[1] - or0;
854 /* Return true if the offset and object size are in range for SIZE. */
856 bool
857 access_ref::offset_in_range (const offset_int &size) const
859 if (size_remaining () < size)
860 return false;
862 if (base0)
863 return offmax[0] >= 0 && offmax[1] <= sizrng[1];
865 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
866 return offmax[0] > -maxoff && offmax[1] < maxoff;
869 /* Add the range [MIN, MAX] to the offset range. For known objects (with
870 zero-based offsets) at least one of whose offset's bounds is in range,
871 constrain the other (or both) to the bounds of the object (i.e., zero
872 and the upper bound of its size). This improves the quality of
873 diagnostics. */
875 void access_ref::add_offset (const offset_int &min, const offset_int &max)
877 if (min <= max)
879 /* To add an ordinary range just add it to the bounds. */
880 offrng[0] += min;
881 offrng[1] += max;
883 else if (!base0)
885 /* To add an inverted range to an offset to an unknown object
886 expand it to the maximum. */
887 add_max_offset ();
888 return;
890 else
892 /* To add an inverted range to an offset to a known object set
893 the upper bound to the maximum representable offset value
894 (which may be greater than MAX_OBJECT_SIZE).
895 The lower bound is either the sum of the current offset and
896 MIN when abs(MAX) is greater than the former, or zero otherwise.
897 Zero because then the inverted range includes the negative of
898 the lower bound. */
899 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
900 offrng[1] = maxoff;
902 if (max >= 0)
904 offrng[0] = 0;
905 if (offmax[0] > 0)
906 offmax[0] = 0;
907 return;
910 offset_int absmax = wi::abs (max);
911 if (offrng[0] < absmax)
913 offrng[0] += min;
914 /* Cap the lower bound at the upper (set to MAXOFF above)
915 to avoid inadvertently recreating an inverted range. */
916 if (offrng[1] < offrng[0])
917 offrng[0] = offrng[1];
919 else
920 offrng[0] = 0;
923 /* Set the minimum and maximum computed so far. */
924 if (offrng[1] < 0 && offrng[1] < offmax[0])
925 offmax[0] = offrng[1];
926 if (offrng[0] > 0 && offrng[0] > offmax[1])
927 offmax[1] = offrng[0];
929 if (!base0)
930 return;
932 /* When referencing a known object check to see if the offset computed
933 so far is in bounds... */
934 offset_int remrng[2];
935 remrng[1] = size_remaining (remrng);
936 if (remrng[1] > 0 || remrng[0] < 0)
938 /* ...if so, constrain it so that neither bound exceeds the size of
939 the object. Out of bounds offsets are left unchanged, and, for
940 better or worse, become in bounds later. They should be detected
941 and diagnosed at the point they first become invalid by
942 -Warray-bounds. */
943 if (offrng[0] < 0)
944 offrng[0] = 0;
945 if (offrng[1] > sizrng[1])
946 offrng[1] = sizrng[1];
950 /* Issue one inform message describing each target of an access REF.
951 WRITE is set for a write access and clear for a read access. */
953 void
954 access_ref::inform_access (access_mode mode) const
956 const access_ref &aref = *this;
957 if (!aref.ref)
958 return;
960 if (aref.phi ())
962 /* Set MAXREF to refer to the largest object and fill ALL_REFS
963 with data for all objects referenced by the PHI arguments. */
964 access_ref maxref;
965 auto_vec<access_ref> all_refs;
966 if (!get_ref (&all_refs, &maxref))
967 return;
969 /* Except for MAXREF, the rest of the arguments' offsets need not
970 reflect one added to the PHI itself. Determine the latter from
971 MAXREF on which the result is based. */
972 const offset_int orng[] =
974 offrng[0] - maxref.offrng[0],
975 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
978 /* Add the final PHI's offset to that of each of the arguments
979 and recurse to issue an inform message for it. */
980 for (unsigned i = 0; i != all_refs.length (); ++i)
982 /* Skip any PHIs; those could lead to infinite recursion. */
983 if (all_refs[i].phi ())
984 continue;
986 all_refs[i].add_offset (orng[0], orng[1]);
987 all_refs[i].inform_access (mode);
989 return;
992 /* Convert offset range and avoid including a zero range since it
993 isn't necessarily meaningful. */
994 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
995 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
996 HOST_WIDE_INT minoff;
997 HOST_WIDE_INT maxoff = diff_max;
998 if (wi::fits_shwi_p (aref.offrng[0]))
999 minoff = aref.offrng[0].to_shwi ();
1000 else
1001 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
1003 if (wi::fits_shwi_p (aref.offrng[1]))
1004 maxoff = aref.offrng[1].to_shwi ();
1006 if (maxoff <= diff_min || maxoff >= diff_max)
1007 /* Avoid mentioning an upper bound that's equal to or in excess
1008 of the maximum of ptrdiff_t. */
1009 maxoff = minoff;
1011 /* Convert size range and always include it since all sizes are
1012 meaningful. */
1013 unsigned long long minsize = 0, maxsize = 0;
1014 if (wi::fits_shwi_p (aref.sizrng[0])
1015 && wi::fits_shwi_p (aref.sizrng[1]))
1017 minsize = aref.sizrng[0].to_shwi ();
1018 maxsize = aref.sizrng[1].to_shwi ();
1021 /* SIZRNG doesn't necessarily have the same range as the allocation
1022 size determined by gimple_call_alloc_size (). */
1023 char sizestr[80];
1024 if (minsize == maxsize)
1025 sprintf (sizestr, "%llu", minsize);
1026 else
1027 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
1029 char offstr[80];
1030 if (minoff == 0
1031 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
1032 offstr[0] = '\0';
1033 else if (minoff == maxoff)
1034 sprintf (offstr, "%lli", (long long) minoff);
1035 else
1036 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
1038 location_t loc = UNKNOWN_LOCATION;
1040 tree ref = this->ref;
1041 tree allocfn = NULL_TREE;
1042 if (TREE_CODE (ref) == SSA_NAME)
1044 gimple *stmt = SSA_NAME_DEF_STMT (ref);
1045 if (!stmt)
1046 return;
1048 if (is_gimple_call (stmt))
1050 loc = gimple_location (stmt);
1051 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1053 /* Strip the SSA_NAME suffix from the variable name and
1054 recreate an identifier with the VLA's original name. */
1055 ref = gimple_call_lhs (stmt);
1056 if (SSA_NAME_IDENTIFIER (ref))
1058 ref = SSA_NAME_IDENTIFIER (ref);
1059 const char *id = IDENTIFIER_POINTER (ref);
1060 size_t len = strcspn (id, ".$");
1061 if (!len)
1062 len = strlen (id);
1063 ref = get_identifier_with_length (id, len);
1066 else
1068 /* Except for VLAs, retrieve the allocation function. */
1069 allocfn = gimple_call_fndecl (stmt);
1070 if (!allocfn)
1071 allocfn = gimple_call_fn (stmt);
1072 if (TREE_CODE (allocfn) == SSA_NAME)
1074 /* For an ALLOC_CALL via a function pointer make a small
1075 effort to determine the destination of the pointer. */
1076 gimple *def = SSA_NAME_DEF_STMT (allocfn);
1077 if (gimple_assign_single_p (def))
1079 tree rhs = gimple_assign_rhs1 (def);
1080 if (DECL_P (rhs))
1081 allocfn = rhs;
1082 else if (TREE_CODE (rhs) == COMPONENT_REF)
1083 allocfn = TREE_OPERAND (rhs, 1);
1088 else if (gimple_nop_p (stmt))
1089 /* Handle DECL_PARM below. */
1090 ref = SSA_NAME_VAR (ref);
1091 else if (is_gimple_assign (stmt)
1092 && (gimple_assign_rhs_code (stmt) == MIN_EXPR
1093 || gimple_assign_rhs_code (stmt) == MAX_EXPR))
1095 /* MIN or MAX_EXPR here implies a reference to a known object
1096 and either an unknown or distinct one (the latter being
1097 the result of an invalid relational expression). Determine
1098 the identity of the former and point to it in the note.
1099 TODO: Consider merging with PHI handling. */
1100 access_ref arg_ref[2];
1101 tree arg = gimple_assign_rhs1 (stmt);
1102 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
1103 arg = gimple_assign_rhs2 (stmt);
1104 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);
1106 /* Use the argument that references a known object with more
1107 space remaining. */
1108 const bool idx
1109 = (!arg_ref[0].ref || !arg_ref[0].base0
1110 || (arg_ref[0].base0 && arg_ref[1].base0
1111 && (arg_ref[0].size_remaining ()
1112 < arg_ref[1].size_remaining ())));
1114 arg_ref[idx].offrng[0] = offrng[0];
1115 arg_ref[idx].offrng[1] = offrng[1];
1116 arg_ref[idx].inform_access (mode);
1117 return;
1121 if (DECL_P (ref))
1122 loc = DECL_SOURCE_LOCATION (ref);
1123 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
1124 loc = EXPR_LOCATION (ref);
1125 else if (TREE_CODE (ref) != IDENTIFIER_NODE
1126 && TREE_CODE (ref) != SSA_NAME)
1127 return;
1129 if (mode == access_read_write || mode == access_write_only)
1131 if (allocfn == NULL_TREE)
1133 if (*offstr)
1134 inform (loc, "at offset %s into destination object %qE of size %s",
1135 offstr, ref, sizestr);
1136 else
1137 inform (loc, "destination object %qE of size %s", ref, sizestr);
1138 return;
1141 if (*offstr)
1142 inform (loc,
1143 "at offset %s into destination object of size %s "
1144 "allocated by %qE", offstr, sizestr, allocfn);
1145 else
1146 inform (loc, "destination object of size %s allocated by %qE",
1147 sizestr, allocfn);
1148 return;
1151 if (mode == access_read_only)
1153 if (allocfn == NULL_TREE)
1155 if (*offstr)
1156 inform (loc, "at offset %s into source object %qE of size %s",
1157 offstr, ref, sizestr);
1158 else
1159 inform (loc, "source object %qE of size %s", ref, sizestr);
1161 return;
1164 if (*offstr)
1165 inform (loc,
1166 "at offset %s into source object of size %s allocated by %qE",
1167 offstr, sizestr, allocfn);
1168 else
1169 inform (loc, "source object of size %s allocated by %qE",
1170 sizestr, allocfn);
1171 return;
1174 if (allocfn == NULL_TREE)
1176 if (*offstr)
1177 inform (loc, "at offset %s into object %qE of size %s",
1178 offstr, ref, sizestr);
1179 else
1180 inform (loc, "object %qE of size %s", ref, sizestr);
1182 return;
1185 if (*offstr)
1186 inform (loc,
1187 "at offset %s into object of size %s allocated by %qE",
1188 offstr, sizestr, allocfn);
1189 else
1190 inform (loc, "object of size %s allocated by %qE",
1191 sizestr, allocfn);
1194 /* Set a bit for the PHI in VISITED and return true if it wasn't
1195 already set. */
1197 bool
1198 ssa_name_limit_t::visit_phi (tree ssa_name)
1200 if (!visited)
1201 visited = BITMAP_ALLOC (NULL);
1203 /* Return false if SSA_NAME has already been visited. */
1204 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
1207 /* Clear a bit for the PHI in VISITED. */
1209 void
1210 ssa_name_limit_t::leave_phi (tree ssa_name)
1212 /* Return false if SSA_NAME has already been visited. */
1213 bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
1216 /* Return false if the SSA_NAME chain length counter has reached
1217 the limit, otherwise increment the counter and return true. */
1219 bool
1220 ssa_name_limit_t::next ()
1222 /* Return a negative value to let caller avoid recursing beyond
1223 the specified limit. */
1224 if (ssa_def_max == 0)
1225 return false;
1227 --ssa_def_max;
1228 return true;
1231 /* If the SSA_NAME has already been "seen" return a positive value.
1232 Otherwise add it to VISITED. If the SSA_NAME limit has been
1233 reached, return a negative value. Otherwise return zero. */
1236 ssa_name_limit_t::next_phi (tree ssa_name)
1239 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1240 /* Return a positive value if the PHI has already been visited. */
1241 if (gimple_code (def_stmt) == GIMPLE_PHI
1242 && !visit_phi (ssa_name))
1243 return 1;
1246 /* Return a negative value to let caller avoid recursing beyond
1247 the specified limit. */
1248 if (ssa_def_max == 0)
1249 return -1;
1251 --ssa_def_max;
1253 return 0;
/* Release the PHI visited-set bitmap if one was ever allocated
   by visit_phi ().  */
1256 ssa_name_limit_t::~ssa_name_limit_t ()
1258 if (visited)
1259 BITMAP_FREE (visited);
1262 /* Default ctor. Initialize object with pointers to the range_query
1263 and cache_type instances to use or null. */
1265 pointer_query::pointer_query (range_query *qry /* = NULL */,
1266 cache_type *cache /* = NULL */)
1267 : rvals (qry), var_cache (cache), hits (), misses (),
1268 failures (), depth (), max_depth ()
1270 /* No op.  The statistics counters and depth members are all
     value-initialized to zero above.  */
1273 /* Return a pointer to the cached access_ref instance for the SSA_NAME
1274 PTR if it's there or null otherwise. */
1276 const access_ref *
1277 pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1279 if (!var_cache)
1281 ++misses;
1282 return NULL;
1285 unsigned version = SSA_NAME_VERSION (ptr);
1286 unsigned idx = version << 1 | (ostype & 1);
1287 if (var_cache->indices.length () <= idx)
1289 ++misses;
1290 return NULL;
1293 unsigned cache_idx = var_cache->indices[idx];
1294 if (var_cache->access_refs.length () <= cache_idx)
1296 ++misses;
1297 return NULL;
1300 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1301 if (cache_ref.ref)
1303 ++hits;
1304 return &cache_ref;
1307 ++misses;
1308 return NULL;
1311 /* Retrieve the access_ref instance for a variable from the cache if it's
1312 there or compute it and insert it into the cache if it's nonnonull. */
1314 bool
1315 pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
1317 const unsigned version
1318 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1320 if (var_cache && version)
1322 unsigned idx = version << 1 | (ostype & 1);
1323 if (idx < var_cache->indices.length ())
1325 unsigned cache_idx = var_cache->indices[idx] - 1;
1326 if (cache_idx < var_cache->access_refs.length ()
1327 && var_cache->access_refs[cache_idx].ref)
1329 ++hits;
1330 *pref = var_cache->access_refs[cache_idx];
1331 return true;
1335 ++misses;
1338 if (!compute_objsize (ptr, ostype, pref, this))
1340 ++failures;
1341 return false;
1344 return true;
1347 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1348 nonnull. */
1350 void
1351 pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
1353 /* Only add populated/valid entries. */
1354 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
1355 return;
1357 /* Add REF to the two-level cache. */
1358 unsigned version = SSA_NAME_VERSION (ptr);
1359 unsigned idx = version << 1 | (ostype & 1);
1361 /* Grow INDICES if necessary. An index is valid if it's nonzero.
1362 Its value minus one is the index into ACCESS_REFS. Not all
1363 entries are valid. */
1364 if (var_cache->indices.length () <= idx)
1365 var_cache->indices.safe_grow_cleared (idx + 1);
1367 if (!var_cache->indices[idx])
1368 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
1370 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
1371 REF member is nonnull. All entries except for the last two
1372 are valid. Once nonnull, the REF value must stay unchanged. */
1373 unsigned cache_idx = var_cache->indices[idx];
1374 if (var_cache->access_refs.length () <= cache_idx)
1375 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
1377 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1378 if (cache_ref.ref)
1380 gcc_checking_assert (cache_ref.ref == ref.ref);
1381 return;
1384 cache_ref = ref;
1387 /* Flush the cache if it's nonnull. */
1389 void
1390 pointer_query::flush_cache ()
1392 if (!var_cache)
1393 return;
1394 var_cache->indices.release ();
1395 var_cache->access_refs.release ();
1398 /* Dump statistics and, optionally, cache contents to DUMP_FILE. */
1400 void
1401 pointer_query::dump (FILE *dump_file, bool contents /* = false */)
/* NOTE(review): VAR_CACHE is dereferenced unconditionally below,
   unlike in get_ref/put_ref/flush_cache which all guard against
   a null cache -- confirm every caller constructs the query with
   a cache before dumping.  */
1403 unsigned nused = 0, nrefs = 0;
/* First pass: count the used level-1 entries (NUSED) and the
   populated access_refs they map to (NREFS).  */
1404 unsigned nidxs = var_cache->indices.length ();
1405 for (unsigned i = 0; i != nidxs; ++i)
1407 unsigned ari = var_cache->indices[i];
1408 if (!ari)
1409 continue;
1411 ++nused;
1413 const access_ref &aref = var_cache->access_refs[ari];
1414 if (!aref.ref)
1415 continue;
1417 ++nrefs;
1420 fprintf (dump_file, "pointer_query counters:\n"
1421 " index cache size: %u\n"
1422 " index entries: %u\n"
1423 " access cache size: %u\n"
1424 " access entries: %u\n"
1425 " hits: %u\n"
1426 " misses: %u\n"
1427 " failures: %u\n"
1428 " max_depth: %u\n",
1429 nidxs, nused,
1430 var_cache->access_refs.length (), nrefs,
1431 hits, misses, failures, max_depth);
1433 if (!contents || !nidxs)
1434 return;
/* Second pass: print each populated entry.  */
1436 fputs ("\npointer_query cache contents:\n", dump_file);
1438 for (unsigned i = 0; i != nidxs; ++i)
1440 unsigned ari = var_cache->indices[i];
1441 if (!ari)
1442 continue;
1444 const access_ref &aref = var_cache->access_refs[ari];
1445 if (!aref.ref)
1446 continue;
1448 /* The level-1 cache index corresponds to the SSA_NAME_VERSION
1449 shifted left by one and ORed with the Object Size Type in
1450 the lowest bit. Print the two separately. */
1451 unsigned ver = i >> 1;
1452 unsigned ost = i & 1;
1454 fprintf (dump_file, " %u.%u[%u]: ", ver, ost, ari);
1455 if (tree name = ssa_name (ver))
1457 print_generic_expr (dump_file, name);
1458 fputs (" = ", dump_file);
1460 else
1461 fprintf (dump_file, " _%u = ", ver);
/* For a PHI print all its arguments, otherwise print the cached
   reference itself.  */
1463 if (gphi *phi = aref.phi ())
1465 fputs ("PHI <", dump_file);
1466 unsigned nargs = gimple_phi_num_args (phi);
1467 for (unsigned i = 0; i != nargs; ++i)
1469 tree arg = gimple_phi_arg_def (phi, i);
1470 print_generic_expr (dump_file, arg);
1471 if (i + 1 < nargs)
1472 fputs (", ", dump_file);
1474 fputc ('>', dump_file);
1476 else
1477 print_generic_expr (dump_file, aref.ref);
/* Append the offset: a range when the bounds differ, a signed
   single value otherwise (omitted when zero).  */
1479 if (aref.offrng[0] != aref.offrng[1])
1480 fprintf (dump_file, " + [%lli, %lli]",
1481 (long long) aref.offrng[0].to_shwi (),
1482 (long long) aref.offrng[1].to_shwi ());
1483 else if (aref.offrng[0] != 0)
1484 fprintf (dump_file, " %c %lli",
1485 aref.offrng[0] < 0 ? '-' : '+',
1486 (long long) aref.offrng[0].to_shwi ());
1488 fputc ('\n', dump_file);
1491 fputc ('\n', dump_file);
1494 /* A helper of compute_objsize_r() to determine the size from an assignment
1495 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. On success
1496 set PREF->REF to the operand with more or less space remaining,
1497 respectively, if both refer to the same (sub)object, or to PTR if they
1498 might not, and return true. Otherwise, if the identity of neither
1499 operand can be determined, return false. */
1501 static bool
1502 handle_min_max_size (tree ptr, int ostype, access_ref *pref,
1503 ssa_name_limit_t &snlim, pointer_query *qry)
1505 const gimple *stmt = SSA_NAME_DEF_STMT (ptr);
1506 const tree_code code = gimple_assign_rhs_code (stmt);
1508 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
1509 Determine the size/offset of each and use the one with more or less
1510 space remaining, respectively. If either fails, use the information
1511 determined from the other instead, adjusted up or down as appropriate
1512 for the expression. */
1513 access_ref aref[2] = { *pref, *pref };
1514 tree arg1 = gimple_assign_rhs1 (stmt);
1515 if (!compute_objsize_r (arg1, ostype, &aref[0], snlim, qry))
/* Operand 1 is unknown: fall back to an unknown base with the
   maximum offset and size ranges.  */
1517 aref[0].base0 = false;
1518 aref[0].offrng[0] = aref[0].offrng[1] = 0;
1519 aref[0].add_max_offset ();
1520 aref[0].set_max_size_range ();
1523 tree arg2 = gimple_assign_rhs2 (stmt);
1524 if (!compute_objsize_r (arg2, ostype, &aref[1], snlim, qry))
/* Likewise for operand 2.  */
1526 aref[1].base0 = false;
1527 aref[1].offrng[0] = aref[1].offrng[1] = 0;
1528 aref[1].add_max_offset ();
1529 aref[1].set_max_size_range ();
1532 if (!aref[0].ref && !aref[1].ref)
1533 /* Fail if the identity of neither argument could be determined. */
1534 return false;
1536 bool i0 = false;
1537 if (aref[0].ref && aref[0].base0)
1539 if (aref[1].ref && aref[1].base0)
1541 /* If the object referenced by both arguments has been determined
1542 set *PREF to the one with more or less space remaining, whichever
1543 is appropriate for CODE.
1544 TODO: Indicate when the objects are distinct so it can be
1545 diagnosed. */
1546 i0 = code == MAX_EXPR;
1547 const bool i1 = !i0;
1549 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
1550 *pref = aref[i1];
1551 else
1552 *pref = aref[i0];
1554 if (aref[i0].ref != aref[i1].ref)
1555 /* If the operands don't refer to the same (sub)object set
1556 PREF->REF to the SSA_NAME from which STMT was obtained
1557 so that both can be identified in a diagnostic. */
1558 pref->ref = ptr;
1560 return true;
1563 /* If only the object referenced by one of the arguments could be
1564 determined, use it and... */
1565 *pref = aref[0];
1566 i0 = true;
1568 else
1569 *pref = aref[1];
1571 const bool i1 = !i0;
1572 /* ...see if the offset obtained from the other pointer can be used
1573 to tighten up the bound on the offset obtained from the first. */
1574 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
1575 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
1577 pref->offrng[0] = aref[i0].offrng[0];
1578 pref->offrng[1] = aref[i0].offrng[1];
1581 /* Replace PTR->REF with the SSA_NAME to indicate the expression
1582 might not refer to the same (sub)object. */
1583 pref->ref = ptr;
1584 return true;
1587 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
1588 AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
1589 on success and false on failure. */
1591 static bool
1592 handle_array_ref (tree aref, bool addr, int ostype, access_ref *pref,
1593 ssa_name_limit_t &snlim, pointer_query *qry)
1595 gcc_assert (TREE_CODE (aref) == ARRAY_REF);
/* Count the dereference implied by the array reference itself.  */
1597 ++pref->deref;
1599 tree arefop = TREE_OPERAND (aref, 0);
1600 tree reftype = TREE_TYPE (arefop);
1601 if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
1602 /* Avoid arrays of pointers. FIXME: Handle pointers to arrays
1603 of known bound. */
1604 return false;
/* First determine the object the array belongs to.  */
1606 if (!compute_objsize_r (arefop, ostype, pref, snlim, qry))
1607 return false;
1609 offset_int orng[2];
1610 tree off = pref->eval (TREE_OPERAND (aref, 1));
1611 range_query *const rvals = qry ? qry->rvals : NULL;
1612 if (!get_offset_range (off, NULL, orng, rvals))
1614 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1615 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1616 orng[0] = -orng[1] - 1;
1619 /* Convert the array index range determined above to a byte
1620 offset. */
1621 tree lowbnd = array_ref_low_bound (aref);
1622 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
1624 /* Adjust the index by the low bound of the array domain
1625 (normally zero but 1 in Fortran). */
1626 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
1627 orng[0] -= lb;
1628 orng[1] -= lb;
1631 tree eltype = TREE_TYPE (aref);
1632 tree tpsize = TYPE_SIZE_UNIT (eltype);
1633 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
/* Variable-size element type: a byte offset cannot be computed.  */
1635 pref->add_max_offset ();
1636 return true;
1639 offset_int sz = wi::to_offset (tpsize);
1640 orng[0] *= sz;
1641 orng[1] *= sz;
1643 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
1645 /* Except for the permissive raw memory functions which use
1646 the size of the whole object determined above, use the size
1647 of the referenced array. Because the overall offset is from
1648 the beginning of the complete array object add this overall
1649 offset to the size of array. */
1650 offset_int sizrng[2] =
1652 pref->offrng[0] + orng[0] + sz,
1653 pref->offrng[1] + orng[1] + sz
1655 if (sizrng[1] < sizrng[0])
1656 std::swap (sizrng[0], sizrng[1]);
/* Only tighten the size bounds; never grow them.  */
1657 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
1658 pref->sizrng[0] = sizrng[0];
1659 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
1660 pref->sizrng[1] = sizrng[1];
1663 pref->add_offset (orng[0], orng[1]);
1664 return true;
1667 /* A helper of compute_objsize_r() to determine the size from MEM_REF
1668 MREF. Return true on success and false on failure. */
1670 static bool
1671 handle_mem_ref (tree mref, int ostype, access_ref *pref,
1672 ssa_name_limit_t &snlim, pointer_query *qry)
1674 gcc_assert (TREE_CODE (mref) == MEM_REF);
1676 ++pref->deref;
1678 if (VECTOR_TYPE_P (TREE_TYPE (mref)))
1680 /* Hack: Handle MEM_REFs of vector types as those to complete
1681 objects; those may be synthesized from multiple assignments
1682 to consecutive data members (see PR 93200 and 96963).
1683 FIXME: Vectorized assignments should only be present after
1684 vectorization so this hack is only necessary after it has
1685 run and could be avoided in calls from prior passes (e.g.,
1686 tree-ssa-strlen.c).
1687 FIXME: Deal with this more generally, e.g., by marking up
1688 such MEM_REFs at the time they're created. */
1689 ostype = 0;
1692 tree mrefop = TREE_OPERAND (mref, 0);
1693 if (!compute_objsize_r (mrefop, ostype, pref, snlim, qry))
1694 return false;
1696 offset_int orng[2];
1697 tree off = pref->eval (TREE_OPERAND (mref, 1));
1698 range_query *const rvals = qry ? qry->rvals : NULL;
1699 if (!get_offset_range (off, NULL, orng, rvals))
1701 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1702 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1703 orng[0] = -orng[1] - 1;
1706 pref->add_offset (orng[0], orng[1]);
1707 return true;
1710 /* Helper to compute the size of the object referenced by the PTR
1711 expression which must have pointer type, using Object Size type
1712 OSTYPE (only the least significant 2 bits are used).
1713 On success, sets PREF->REF to the DECL of the referenced object
1714 if it's unique, otherwise to null, PREF->OFFRNG to the range of
1715 offsets into it, and PREF->SIZRNG to the range of sizes of
1716 the object(s).
1717 SNLIM is used to avoid visiting the same PHI operand multiple
1718 times, and, when nonnull, RVALS to determine range information.
1719 Returns true on success, false when a meaningful size (or range)
1720 cannot be determined.
1722 The function is intended for diagnostics and should not be used
1723 to influence code generation or optimization. */
1725 static bool
1726 compute_objsize_r (tree ptr, int ostype, access_ref *pref,
1727 ssa_name_limit_t &snlim, pointer_query *qry)
1729 STRIP_NOPS (ptr);
/* Taking the address undoes one level of dereference.  */
1731 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
1732 if (addr)
1734 --pref->deref;
1735 ptr = TREE_OPERAND (ptr, 0);
1738 if (DECL_P (ptr))
1740 pref->ref = ptr;
1742 /* Reset the offset in case it was set by a prior call and not
1743 cleared by the caller. The offset is only adjusted after
1744 the identity of the object has been determined. */
1745 pref->offrng[0] = pref->offrng[1] = 0;
1747 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
1749 /* Set the maximum size if the reference is to the pointer
1750 itself (as opposed to what it points to), and clear
1751 BASE0 since the offset isn't necessarily zero-based. */
1752 pref->set_max_size_range ();
1753 pref->base0 = false;
1754 return true;
1757 /* Valid offsets into the object are nonnegative. */
1758 pref->base0 = true;
1760 if (tree size = decl_init_size (ptr, false))
1761 if (TREE_CODE (size) == INTEGER_CST)
1763 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1764 return true;
1767 pref->set_max_size_range ();
1768 return true;
1771 const tree_code code = TREE_CODE (ptr);
1772 range_query *const rvals = qry ? qry->rvals : NULL;
/* For a bit-field reference determine the size of the enclosing
   object and add the byte offset of the bit-field to it.  */
1774 if (code == BIT_FIELD_REF)
1776 tree ref = TREE_OPERAND (ptr, 0);
1777 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
1778 return false;
1780 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
1781 pref->add_offset (off / BITS_PER_UNIT);
1782 return true;
1785 if (code == COMPONENT_REF)
1787 tree ref = TREE_OPERAND (ptr, 0);
1788 if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
1789 /* In accesses through union types consider the entire unions
1790 rather than just their members. */
1791 ostype = 0;
1792 tree field = TREE_OPERAND (ptr, 1);
1794 if (ostype == 0)
1796 /* In OSTYPE zero (for raw memory functions like memcpy), use
1797 the maximum size instead if the identity of the enclosing
1798 object cannot be determined. */
1799 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
1800 return false;
1802 /* Otherwise, use the size of the enclosing object and add
1803 the offset of the member to the offset computed so far. */
1804 tree offset = byte_position (field);
1805 if (TREE_CODE (offset) == INTEGER_CST)
1806 pref->add_offset (wi::to_offset (offset));
1807 else
1808 pref->add_max_offset ();
1810 if (!pref->ref)
1811 /* REF may have been already set to an SSA_NAME earlier
1812 to provide better context for diagnostics. In that case,
1813 leave it unchanged. */
1814 pref->ref = ref;
1815 return true;
1818 pref->ref = field;
1820 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
1822 /* Set maximum size if the reference is to the pointer member
1823 itself (as opposed to what it points to). */
1824 pref->set_max_size_range ();
1825 return true;
1828 /* SAM is set for array members that might need special treatment. */
1829 special_array_member sam;
1830 tree size = component_ref_size (ptr, &sam);
1831 if (sam == special_array_member::int_0)
1832 pref->sizrng[0] = pref->sizrng[1] = 0;
1833 else if (!pref->trail1special && sam == special_array_member::trail_1)
1834 pref->sizrng[0] = pref->sizrng[1] = 1;
1835 else if (size && TREE_CODE (size) == INTEGER_CST)
1836 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1837 else
1839 /* When the size of the member is unknown it's either a flexible
1840 array member or a trailing special array member (either zero
1841 length or one-element). Set the size to the maximum minus
1842 the constant size of the type. */
1843 pref->sizrng[0] = 0;
1844 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1845 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
1846 if (TREE_CODE (recsize) == INTEGER_CST)
1847 pref->sizrng[1] -= wi::to_offset (recsize);
1849 return true;
1852 if (code == ARRAY_REF)
1853 return handle_array_ref (ptr, addr, ostype, pref, snlim, qry);
1855 if (code == MEM_REF)
1856 return handle_mem_ref (ptr, ostype, pref, snlim, qry);
1858 if (code == TARGET_MEM_REF)
1860 tree ref = TREE_OPERAND (ptr, 0);
1861 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
1862 return false;
1864 /* TODO: Handle remaining operands. Until then, add maximum offset. */
1865 pref->ref = ptr;
1866 pref->add_max_offset ();
1867 return true;
1870 if (code == INTEGER_CST)
1872 /* Pointer constants other than null are most likely the result
1873 of erroneous null pointer addition/subtraction. Unless zero
1874 is a valid address set size to zero. For null pointers, set
1875 size to the maximum for now since those may be the result of
1876 jump threading. */
1877 if (integer_zerop (ptr))
1878 pref->set_max_size_range ();
1879 else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
1881 tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
1882 addr_space_t as = TYPE_ADDR_SPACE (deref_type);
1883 if (targetm.addr_space.zero_address_valid (as))
1884 pref->set_max_size_range ();
1885 else
1886 pref->sizrng[0] = pref->sizrng[1] = 0;
1888 else
1889 pref->sizrng[0] = pref->sizrng[1] = 0;
1891 pref->ref = ptr;
1893 return true;
1896 if (code == STRING_CST)
1898 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
1899 pref->ref = ptr;
1900 return true;
1903 if (code == POINTER_PLUS_EXPR)
1905 tree ref = TREE_OPERAND (ptr, 0);
1906 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
1907 return false;
1909 /* Clear DEREF since the offset is being applied to the target
1910 of the dereference. */
1911 pref->deref = 0;
1913 offset_int orng[2];
1914 tree off = pref->eval (TREE_OPERAND (ptr, 1));
1915 if (get_offset_range (off, NULL, orng, rvals))
1916 pref->add_offset (orng[0], orng[1]);
1917 else
1918 pref->add_max_offset ();
1919 return true;
1922 if (code == VIEW_CONVERT_EXPR)
1924 ptr = TREE_OPERAND (ptr, 0);
1925 return compute_objsize_r (ptr, ostype, pref, snlim, qry);
1928 if (code == SSA_NAME)
1930 if (!snlim.next ())
1931 return false;
1933 /* Only process an SSA_NAME if the recursion limit has not yet
1934 been reached. */
1935 if (qry)
1937 if (++qry->depth)
1938 qry->max_depth = qry->depth;
1939 if (const access_ref *cache_ref = qry->get_ref (ptr))
1941 /* If the pointer is in the cache set *PREF to what it refers
1942 to and return success.
1943 FIXME: BNDRNG is determined by each access and so it doesn't
1944 belong in access_ref. Until the design is changed, keep it
1945 unchanged here. */
1946 const offset_int bndrng[2] = { pref->bndrng[0], pref->bndrng[1] };
1947 *pref = *cache_ref;
1948 pref->bndrng[0] = bndrng[0];
1949 pref->bndrng[1] = bndrng[1];
1950 return true;
1954 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
1955 if (is_gimple_call (stmt))
1957 /* If STMT is a call to an allocation function get the size
1958 from its argument(s). If successful, also set *PREF->REF
1959 to PTR for the caller to include in diagnostics. */
1960 wide_int wr[2];
1961 if (gimple_call_alloc_size (stmt, wr, rvals))
1963 pref->ref = ptr;
1964 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
1965 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
1966 /* Constrain both bounds to a valid size. */
1967 offset_int maxsize = wi::to_offset (max_object_size ());
1968 if (pref->sizrng[0] > maxsize)
1969 pref->sizrng[0] = maxsize;
1970 if (pref->sizrng[1] > maxsize)
1971 pref->sizrng[1] = maxsize;
1973 else
1975 /* For functions known to return one of their pointer arguments
1976 try to determine what the returned pointer points to, and on
1977 success add OFFRNG which was set to the offset added by
1978 the function (e.g., memchr) to the overall offset. */
1979 bool past_end;
1980 offset_int offrng[2];
1981 if (tree ret = gimple_call_return_array (stmt, offrng,
1982 &past_end, rvals))
1984 if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
1985 return false;
1987 /* Cap OFFRNG[1] to at most the remaining size of
1988 the object. */
1989 offset_int remrng[2];
1990 remrng[1] = pref->size_remaining (remrng);
1991 if (remrng[1] != 0 && !past_end)
1992 /* Decrement the size for functions that never return
1993 a past-the-end pointer. */
1994 remrng[1] -= 1;
1996 if (remrng[1] < offrng[1])
1997 offrng[1] = remrng[1];
1998 pref->add_offset (offrng[0], offrng[1]);
2000 else
2002 /* For other calls that might return arbitrary pointers
2003 including into the middle of objects set the size
2004 range to maximum, clear PREF->BASE0, and also set
2005 PREF->REF to include in diagnostics. */
2006 pref->set_max_size_range ();
2007 pref->base0 = false;
2008 pref->ref = ptr;
/* NOTE(review): QRY is dereferenced here (and below) without the
   null check applied earlier in this function -- all current
   entry points pass a nonnull query; confirm before relying on
   a null QRY.  */
2011 qry->put_ref (ptr, *pref);
2012 return true;
2015 if (gimple_nop_p (stmt))
2017 /* For a function argument try to determine the byte size
2018 of the array from the current function declaration
2019 (e.g., attribute access or related). */
2020 wide_int wr[2];
2021 bool static_array = false;
2022 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
2024 pref->parmarray = !static_array;
2025 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
2026 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
2027 pref->ref = ref;
2028 qry->put_ref (ptr, *pref);
2029 return true;
2032 pref->set_max_size_range ();
2033 pref->base0 = false;
2034 pref->ref = ptr;
2035 qry->put_ref (ptr, *pref);
2036 return true;
2039 if (gimple_code (stmt) == GIMPLE_PHI)
2041 pref->ref = ptr;
2042 access_ref phi_ref = *pref;
2043 if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
2044 return false;
2045 *pref = phi_ref;
2046 pref->ref = ptr;
2047 qry->put_ref (ptr, *pref);
2048 return true;
2051 if (!is_gimple_assign (stmt))
2053 /* Clear BASE0 since the assigned pointer might point into
2054 the middle of the object, set the maximum size range and,
2055 if the SSA_NAME refers to a function argument, set
2056 PREF->REF to it. */
2057 pref->base0 = false;
2058 pref->set_max_size_range ();
2059 pref->ref = ptr;
2060 return true;
2063 tree_code code = gimple_assign_rhs_code (stmt);
2065 if (code == MAX_EXPR || code == MIN_EXPR)
2067 if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
2068 return false;
2070 qry->put_ref (ptr, *pref);
2071 return true;
2074 tree rhs = gimple_assign_rhs1 (stmt);
2076 if (code == ASSERT_EXPR)
2078 rhs = TREE_OPERAND (rhs, 0);
2079 return compute_objsize_r (rhs, ostype, pref, snlim, qry);
2082 if (code == POINTER_PLUS_EXPR
2083 && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
2085 /* Compute the size of the object first. */
2086 if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
2087 return false;
2089 offset_int orng[2];
2090 tree off = gimple_assign_rhs2 (stmt);
2091 if (get_offset_range (off, stmt, orng, rvals))
2092 pref->add_offset (orng[0], orng[1]);
2093 else
2094 pref->add_max_offset ();
2096 qry->put_ref (ptr, *pref);
2097 return true;
2100 if (code == ADDR_EXPR || code == SSA_NAME)
2102 if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
2103 return false;
2104 qry->put_ref (ptr, *pref);
2105 return true;
2108 /* (This could also be an assignment from a nonlocal pointer.) Save
2109 PTR to mention in diagnostics but otherwise treat it as a pointer
2110 to an unknown object. */
2111 pref->ref = rhs;
2112 pref->base0 = false;
2113 pref->set_max_size_range ();
2114 return true;
2117 /* Assume all other expressions point into an unknown object
2118 of the maximum valid size. */
2119 pref->ref = ptr;
2120 pref->base0 = false;
2121 pref->set_max_size_range ();
2122 if (TREE_CODE (ptr) == SSA_NAME)
2123 qry->put_ref (ptr, *pref);
2124 return true;
2127 /* A "public" wrapper around the above. Clients should use this overload
2128 instead. */
2130 tree
2131 compute_objsize (tree ptr, int ostype, access_ref *pref,
2132 range_query *rvals /* = NULL */)
2134 pointer_query qry;
2135 qry.rvals = rvals;
2137 /* Clear and invalidate in case *PREF is being reused. */
2138 pref->offrng[0] = pref->offrng[1] = 0;
2139 pref->sizrng[0] = pref->sizrng[1] = -1;
2141 ssa_name_limit_t snlim;
2142 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
2143 return NULL_TREE;
2145 offset_int maxsize = pref->size_remaining ();
2146 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2147 pref->offrng[0] = 0;
2148 return wide_int_to_tree (sizetype, maxsize);
2151 /* Transitional wrapper. The function should be removed once callers
2152 transition to the pointer_query API. */
2154 tree
2155 compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
2157 pointer_query qry;
2158 if (ptr_qry)
2159 ptr_qry->depth = 0;
2160 else
2161 ptr_qry = &qry;
2163 /* Clear and invalidate in case *PREF is being reused. */
2164 pref->offrng[0] = pref->offrng[1] = 0;
2165 pref->sizrng[0] = pref->sizrng[1] = -1;
2167 ssa_name_limit_t snlim;
2168 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
2169 return NULL_TREE;
2171 offset_int maxsize = pref->size_remaining ();
2172 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2173 pref->offrng[0] = 0;
2174 return wide_int_to_tree (sizetype, maxsize);
2177 /* Legacy wrapper around the above. The function should be removed
2178 once callers transition to one of the two above. */
2180 tree
2181 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
2182 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2184 /* Set the initial offsets to zero and size to negative to indicate
2185 none has been computed yet. */
2186 access_ref ref;
2187 tree size = compute_objsize (ptr, ostype, &ref, rvals);
2188 if (!size || !ref.base0)
2189 return NULL_TREE;
2191 if (pdecl)
2192 *pdecl = ref.ref;
2194 if (poff)
2195 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
2197 return size;