Daily bump.
[official-gcc.git] / gcc / pointer-query.cc
blob4bedf7fca47bf4698e56407f1ccd7e2496cf0df3
1 /* Definitions of the pointer_query and related classes.
3 Copyright (C) 2020-2021 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "stringpool.h"
28 #include "tree-vrp.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "tree-ssa-strlen.h"
33 #include "langhooks.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "gimple-fold.h"
37 #include "gimple-ssa.h"
38 #include "intl.h"
39 #include "attr-fnspec.h"
40 #include "gimple-range.h"
41 #include "pointer-query.h"
42 #include "tree-pretty-print.h"
43 #include "tree-ssanames.h"
44 #include "target.h"
46 static bool compute_objsize_r (tree, gimple *, bool, int, access_ref *,
47 ssa_name_limit_t &, pointer_query *);
49 /* Wrapper around the wide_int overload of get_range that accepts
50 offset_int instead. For middle end expressions returns the same
51 result. For a subset of nonconstamt expressions emitted by the front
52 end determines a more precise range than would be possible otherwise. */
54 static bool
55 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
57 offset_int add = 0;
58 if (TREE_CODE (x) == PLUS_EXPR)
60 /* Handle constant offsets in pointer addition expressions seen
61 n the front end IL. */
62 tree op = TREE_OPERAND (x, 1);
63 if (TREE_CODE (op) == INTEGER_CST)
65 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
66 add = wi::to_offset (op);
67 x = TREE_OPERAND (x, 0);
71 if (TREE_CODE (x) == NOP_EXPR)
72 /* Also handle conversions to sizetype seen in the front end IL. */
73 x = TREE_OPERAND (x, 0);
75 tree type = TREE_TYPE (x);
76 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
77 return false;
79 if (TREE_CODE (x) != INTEGER_CST
80 && TREE_CODE (x) != SSA_NAME)
82 if (TYPE_UNSIGNED (type)
83 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
84 type = signed_type_for (type);
86 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
87 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
88 return x;
91 wide_int wr[2];
92 if (!get_range (x, stmt, wr, rvals))
93 return false;
95 signop sgn = SIGNED;
96 /* Only convert signed integers or unsigned sizetype to a signed
97 offset and avoid converting large positive values in narrower
98 types to negative offsets. */
99 if (TYPE_UNSIGNED (type)
100 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
101 sgn = UNSIGNED;
103 r[0] = offset_int::from (wr[0], sgn);
104 r[1] = offset_int::from (wr[1], sgn);
105 return true;
108 /* Return the argument that the call STMT to a built-in function returns
109 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
110 from the argument reflected in the value returned by the built-in if it
111 can be determined, otherwise to 0 and HWI_M1U respectively. Set
112 *PAST_END for functions like mempcpy that might return a past the end
113 pointer (most functions return a dereferenceable pointer to an existing
114 element of an array). */
116 static tree
117 gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
118 ssa_name_limit_t &snlim, pointer_query *qry)
120 /* Clear and set below for the rare function(s) that might return
121 a past-the-end pointer. */
122 *past_end = false;
125 /* Check for attribute fn spec to see if the function returns one
126 of its arguments. */
127 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
128 unsigned int argno;
129 if (fnspec.returns_arg (&argno))
131 /* Functions return the first argument (not a range). */
132 offrng[0] = offrng[1] = 0;
133 return gimple_call_arg (stmt, argno);
137 if (gimple_call_num_args (stmt) < 1)
138 return NULL_TREE;
140 tree fn = gimple_call_fndecl (stmt);
141 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
143 /* See if this is a call to placement new. */
144 if (!fn
145 || !DECL_IS_OPERATOR_NEW_P (fn)
146 || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
147 return NULL_TREE;
149 /* Check the mangling, keeping in mind that operator new takes
150 a size_t which could be unsigned int or unsigned long. */
151 tree fname = DECL_ASSEMBLER_NAME (fn);
152 if (!id_equal (fname, "_ZnwjPv") // ordinary form
153 && !id_equal (fname, "_ZnwmPv") // ordinary form
154 && !id_equal (fname, "_ZnajPv") // array form
155 && !id_equal (fname, "_ZnamPv")) // array form
156 return NULL_TREE;
158 if (gimple_call_num_args (stmt) != 2)
159 return NULL_TREE;
161 /* Allocation functions return a pointer to the beginning. */
162 offrng[0] = offrng[1] = 0;
163 return gimple_call_arg (stmt, 1);
166 switch (DECL_FUNCTION_CODE (fn))
168 case BUILT_IN_MEMCPY:
169 case BUILT_IN_MEMCPY_CHK:
170 case BUILT_IN_MEMMOVE:
171 case BUILT_IN_MEMMOVE_CHK:
172 case BUILT_IN_MEMSET:
173 case BUILT_IN_STRCAT:
174 case BUILT_IN_STRCAT_CHK:
175 case BUILT_IN_STRCPY:
176 case BUILT_IN_STRCPY_CHK:
177 case BUILT_IN_STRNCAT:
178 case BUILT_IN_STRNCAT_CHK:
179 case BUILT_IN_STRNCPY:
180 case BUILT_IN_STRNCPY_CHK:
181 /* Functions return the first argument (not a range). */
182 offrng[0] = offrng[1] = 0;
183 return gimple_call_arg (stmt, 0);
185 case BUILT_IN_MEMPCPY:
186 case BUILT_IN_MEMPCPY_CHK:
188 /* The returned pointer is in a range constrained by the smaller
189 of the upper bound of the size argument and the source object
190 size. */
191 offrng[0] = 0;
192 offrng[1] = HOST_WIDE_INT_M1U;
193 tree off = gimple_call_arg (stmt, 2);
194 bool off_valid = get_offset_range (off, stmt, offrng, qry->rvals);
195 if (!off_valid || offrng[0] != offrng[1])
197 /* If the offset is either indeterminate or in some range,
198 try to constrain its upper bound to at most the size
199 of the source object. */
200 access_ref aref;
201 tree src = gimple_call_arg (stmt, 1);
202 if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
203 && aref.sizrng[1] < offrng[1])
204 offrng[1] = aref.sizrng[1];
207 /* Mempcpy may return a past-the-end pointer. */
208 *past_end = true;
209 return gimple_call_arg (stmt, 0);
212 case BUILT_IN_MEMCHR:
214 tree off = gimple_call_arg (stmt, 2);
215 if (get_offset_range (off, stmt, offrng, qry->rvals))
216 offrng[1] -= 1;
217 else
218 offrng[1] = HOST_WIDE_INT_M1U;
220 offrng[0] = 0;
221 return gimple_call_arg (stmt, 0);
224 case BUILT_IN_STRCHR:
225 case BUILT_IN_STRRCHR:
226 case BUILT_IN_STRSTR:
227 offrng[0] = 0;
228 offrng[1] = HOST_WIDE_INT_M1U;
229 return gimple_call_arg (stmt, 0);
231 case BUILT_IN_STPCPY:
232 case BUILT_IN_STPCPY_CHK:
234 access_ref aref;
235 tree src = gimple_call_arg (stmt, 1);
236 if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry))
237 offrng[1] = aref.sizrng[1] - 1;
238 else
239 offrng[1] = HOST_WIDE_INT_M1U;
241 offrng[0] = 0;
242 return gimple_call_arg (stmt, 0);
245 case BUILT_IN_STPNCPY:
246 case BUILT_IN_STPNCPY_CHK:
248 /* The returned pointer is in a range between the first argument
249 and it plus the smaller of the upper bound of the size argument
250 and the source object size. */
251 offrng[1] = HOST_WIDE_INT_M1U;
252 tree off = gimple_call_arg (stmt, 2);
253 if (!get_offset_range (off, stmt, offrng, qry->rvals)
254 || offrng[0] != offrng[1])
256 /* If the offset is either indeterminate or in some range,
257 try to constrain its upper bound to at most the size
258 of the source object. */
259 access_ref aref;
260 tree src = gimple_call_arg (stmt, 1);
261 if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
262 && aref.sizrng[1] < offrng[1])
263 offrng[1] = aref.sizrng[1];
266 /* When the source is the empty string the returned pointer is
267 a copy of the argument. Otherwise stpcpy can also return
268 a past-the-end pointer. */
269 offrng[0] = 0;
270 *past_end = true;
271 return gimple_call_arg (stmt, 0);
274 default:
275 break;
278 return NULL_TREE;
281 /* Return true when EXP's range can be determined and set RANGE[] to it
282 after adjusting it if necessary to make EXP a represents a valid size
283 of object, or a valid size argument to an allocation function declared
284 with attribute alloc_size (whose argument may be signed), or to a string
285 manipulation function like memset.
286 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
287 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
288 a (nearly) invalid argument to allocation functions like malloc but it
289 is a valid argument to functions like memset.
290 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
291 in a multi-range, otherwise to the smallest valid subrange. */
293 bool
294 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
295 int flags /* = 0 */)
297 if (!exp)
298 return false;
300 if (tree_fits_uhwi_p (exp))
302 /* EXP is a constant. */
303 range[0] = range[1] = exp;
304 return true;
307 tree exptype = TREE_TYPE (exp);
308 bool integral = INTEGRAL_TYPE_P (exptype);
310 wide_int min, max;
311 enum value_range_kind range_type;
313 if (!query)
314 query = get_range_query (cfun);
316 if (integral)
318 value_range vr;
320 query->range_of_expr (vr, exp, stmt);
322 if (vr.undefined_p ())
323 vr.set_varying (TREE_TYPE (exp));
324 range_type = vr.kind ();
325 min = wi::to_wide (vr.min ());
326 max = wi::to_wide (vr.max ());
328 else
329 range_type = VR_VARYING;
331 if (range_type == VR_VARYING)
333 if (integral)
335 /* Use the full range of the type of the expression when
336 no value range information is available. */
337 range[0] = TYPE_MIN_VALUE (exptype);
338 range[1] = TYPE_MAX_VALUE (exptype);
339 return true;
342 range[0] = NULL_TREE;
343 range[1] = NULL_TREE;
344 return false;
347 unsigned expprec = TYPE_PRECISION (exptype);
349 bool signed_p = !TYPE_UNSIGNED (exptype);
351 if (range_type == VR_ANTI_RANGE)
353 if (signed_p)
355 if (wi::les_p (max, 0))
357 /* EXP is not in a strictly negative range. That means
358 it must be in some (not necessarily strictly) positive
359 range which includes zero. Since in signed to unsigned
360 conversions negative values end up converted to large
361 positive values, and otherwise they are not valid sizes,
362 the resulting range is in both cases [0, TYPE_MAX]. */
363 min = wi::zero (expprec);
364 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
366 else if (wi::les_p (min - 1, 0))
368 /* EXP is not in a negative-positive range. That means EXP
369 is either negative, or greater than max. Since negative
370 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
371 min = max + 1;
372 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
374 else
376 max = min - 1;
377 min = wi::zero (expprec);
380 else
382 wide_int maxsize = wi::to_wide (max_object_size ());
383 min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
384 max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
385 if (wi::eq_p (0, min - 1))
387 /* EXP is unsigned and not in the range [1, MAX]. That means
388 it's either zero or greater than MAX. Even though 0 would
389 normally be detected by -Walloc-zero, unless ALLOW_ZERO
390 is set, set the range to [MAX, TYPE_MAX] so that when MAX
391 is greater than the limit the whole range is diagnosed. */
392 wide_int maxsize = wi::to_wide (max_object_size ());
393 if (flags & SR_ALLOW_ZERO)
395 if (wi::leu_p (maxsize, max + 1)
396 || !(flags & SR_USE_LARGEST))
397 min = max = wi::zero (expprec);
398 else
400 min = max + 1;
401 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
404 else
406 min = max + 1;
407 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
410 else if ((flags & SR_USE_LARGEST)
411 && wi::ltu_p (max + 1, maxsize))
413 /* When USE_LARGEST is set and the larger of the two subranges
414 is a valid size, use it... */
415 min = max + 1;
416 max = maxsize;
418 else
420 /* ...otherwise use the smaller subrange. */
421 max = min - 1;
422 min = wi::zero (expprec);
427 range[0] = wide_int_to_tree (exptype, min);
428 range[1] = wide_int_to_tree (exptype, max);
430 return true;
433 bool
434 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
436 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
439 /* If STMT is a call to an allocation function, returns the constant
440 maximum size of the object allocated by the call represented as
441 sizetype. If nonnull, sets RNG1[] to the range of the size.
442 When nonnull, uses RVALS for range information, otherwise gets global
443 range info.
444 Returns null when STMT is not a call to a valid allocation function. */
446 tree
447 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
448 range_query *qry /* = NULL */)
450 if (!stmt || !is_gimple_call (stmt))
451 return NULL_TREE;
453 tree allocfntype;
454 if (tree fndecl = gimple_call_fndecl (stmt))
455 allocfntype = TREE_TYPE (fndecl);
456 else
457 allocfntype = gimple_call_fntype (stmt);
459 if (!allocfntype)
460 return NULL_TREE;
462 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
463 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
464 if (!at)
466 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
467 return NULL_TREE;
469 argidx1 = 0;
472 unsigned nargs = gimple_call_num_args (stmt);
474 if (argidx1 == UINT_MAX)
476 tree atval = TREE_VALUE (at);
477 if (!atval)
478 return NULL_TREE;
480 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
481 if (nargs <= argidx1)
482 return NULL_TREE;
484 atval = TREE_CHAIN (atval);
485 if (atval)
487 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
488 if (nargs <= argidx2)
489 return NULL_TREE;
493 tree size = gimple_call_arg (stmt, argidx1);
495 wide_int rng1_buf[2];
496 /* If RNG1 is not set, use the buffer. */
497 if (!rng1)
498 rng1 = rng1_buf;
500 /* Use maximum precision to avoid overflow below. */
501 const int prec = ADDR_MAX_PRECISION;
504 tree r[2];
505 /* Determine the largest valid range size, including zero. */
506 if (!get_size_range (qry, size, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
507 return NULL_TREE;
508 rng1[0] = wi::to_wide (r[0], prec);
509 rng1[1] = wi::to_wide (r[1], prec);
512 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
513 return fold_convert (sizetype, size);
515 /* To handle ranges do the math in wide_int and return the product
516 of the upper bounds as a constant. Ignore anti-ranges. */
517 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
518 wide_int rng2[2];
520 tree r[2];
521 /* As above, use the full non-negative range on failure. */
522 if (!get_size_range (qry, n, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
523 return NULL_TREE;
524 rng2[0] = wi::to_wide (r[0], prec);
525 rng2[1] = wi::to_wide (r[1], prec);
528 /* Compute products of both bounds for the caller but return the lesser
529 of SIZE_MAX and the product of the upper bounds as a constant. */
530 rng1[0] = rng1[0] * rng2[0];
531 rng1[1] = rng1[1] * rng2[1];
533 const tree size_max = TYPE_MAX_VALUE (sizetype);
534 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
536 rng1[1] = wi::to_wide (size_max, prec);
537 return size_max;
540 return wide_int_to_tree (sizetype, rng1[1]);
543 /* For an access to an object referenced to by the function parameter PTR
544 of pointer type, and set RNG[] to the range of sizes of the object
545 obtainedfrom the attribute access specification for the current function.
546 Set STATIC_ARRAY if the array parameter has been declared [static].
547 Return the function parameter on success and null otherwise. */
549 static tree
550 gimple_parm_array_size (tree ptr, wide_int rng[2],
551 bool *static_array /* = NULL */)
553 /* For a function argument try to determine the byte size of the array
554 from the current function declaratation (e.g., attribute access or
555 related). */
556 tree var = SSA_NAME_VAR (ptr);
557 if (TREE_CODE (var) != PARM_DECL)
558 return NULL_TREE;
560 const unsigned prec = TYPE_PRECISION (sizetype);
562 rdwr_map rdwr_idx;
563 attr_access *access = get_parm_access (rdwr_idx, var);
564 if (!access)
565 return NULL_TREE;
567 if (access->sizarg != UINT_MAX)
569 /* TODO: Try to extract the range from the argument based on
570 those of subsequent assertions or based on known calls to
571 the current function. */
572 return NULL_TREE;
575 if (!access->minsize)
576 return NULL_TREE;
578 /* Only consider ordinary array bound at level 2 (or above if it's
579 ever added). */
580 if (warn_array_parameter < 2 && !access->static_p)
581 return NULL_TREE;
583 if (static_array)
584 *static_array = access->static_p;
586 rng[0] = wi::zero (prec);
587 rng[1] = wi::uhwi (access->minsize, prec);
588 /* Multiply the array bound encoded in the attribute by the size
589 of what the pointer argument to which it decays points to. */
590 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
591 tree size = TYPE_SIZE_UNIT (eltype);
592 if (!size || TREE_CODE (size) != INTEGER_CST)
593 return NULL_TREE;
595 rng[1] *= wi::to_wide (size, prec);
596 return var;
599 /* Initialize the object. */
601 access_ref::access_ref ()
602 : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
603 base0 (true), parmarray ()
605 /* Set to valid. */
606 offrng[0] = offrng[1] = 0;
607 offmax[0] = offmax[1] = 0;
608 /* Invalidate. */
609 sizrng[0] = sizrng[1] = -1;
612 /* Return the PHI node REF refers to or null if it doesn't. */
614 gphi *
615 access_ref::phi () const
617 if (!ref || TREE_CODE (ref) != SSA_NAME)
618 return NULL;
620 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
621 if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
622 return NULL;
624 return as_a <gphi *> (def_stmt);
627 /* Determine the size and offset for ARG, append it to ALL_REFS, and
628 merge the result with *THIS. Ignore ARG if SKIP_NULL is set and
629 ARG refers to the null pointer. Return true on success and false
630 on failure. */
632 bool
633 access_ref::merge_ref (vec<access_ref> *all_refs, tree arg, gimple *stmt,
634 int ostype, bool skip_null,
635 ssa_name_limit_t &snlim, pointer_query &qry)
637 access_ref aref;
638 if (!compute_objsize_r (arg, stmt, false, ostype, &aref, snlim, &qry)
639 || aref.sizrng[0] < 0)
640 /* This may be a PHI with all null pointer arguments. */
641 return false;
643 if (all_refs)
645 access_ref dummy_ref;
646 aref.get_ref (all_refs, &dummy_ref, ostype, &snlim, &qry);
649 if (TREE_CODE (arg) == SSA_NAME)
650 qry.put_ref (arg, aref, ostype);
652 if (all_refs)
653 all_refs->safe_push (aref);
655 aref.deref += deref;
657 bool merged_parmarray = aref.parmarray;
659 const bool nullp = skip_null && integer_zerop (arg);
660 const offset_int maxobjsize = wi::to_offset (max_object_size ());
661 offset_int minsize = sizrng[0];
663 if (sizrng[0] < 0)
665 /* If *THIS doesn't contain a meaningful result yet set it to AREF
666 unless the argument is null and it's okay to ignore it. */
667 if (!nullp)
668 *this = aref;
670 /* Set if the current argument refers to one or more objects of
671 known size (or range of sizes), as opposed to referring to
672 one or more unknown object(s). */
673 const bool arg_known_size = (aref.sizrng[0] != 0
674 || aref.sizrng[1] != maxobjsize);
675 if (arg_known_size)
676 sizrng[0] = aref.sizrng[0];
678 return true;
681 /* Disregard null pointers in PHIs with two or more arguments.
682 TODO: Handle this better! */
683 if (nullp)
684 return true;
686 const bool known_size = (sizrng[0] != 0 || sizrng[1] != maxobjsize);
688 if (known_size && aref.sizrng[0] < minsize)
689 minsize = aref.sizrng[0];
691 /* Extend the size and offset of *THIS to account for AREF. The result
692 can be cached but results in false negatives. */
694 offset_int orng[2];
695 if (sizrng[1] < aref.sizrng[1])
697 orng[0] = offrng[0];
698 orng[1] = offrng[1];
699 *this = aref;
701 else
703 orng[0] = aref.offrng[0];
704 orng[1] = aref.offrng[1];
707 if (orng[0] < offrng[0])
708 offrng[0] = orng[0];
709 if (offrng[1] < orng[1])
710 offrng[1] = orng[1];
712 /* Reset the PHI's BASE0 flag if any of the nonnull arguments
713 refers to an object at an unknown offset. */
714 if (!aref.base0)
715 base0 = false;
717 sizrng[0] = minsize;
718 parmarray = merged_parmarray;
720 return true;
723 /* Determine and return the largest object to which *THIS refers. If
724 *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
725 of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
726 argument ARG. */
728 tree
729 access_ref::get_ref (vec<access_ref> *all_refs,
730 access_ref *pref /* = NULL */,
731 int ostype /* = 1 */,
732 ssa_name_limit_t *psnlim /* = NULL */,
733 pointer_query *qry /* = NULL */) const
735 if (!ref || TREE_CODE (ref) != SSA_NAME)
736 return NULL;
738 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
739 cause unbounded recursion. */
740 ssa_name_limit_t snlim_buf;
741 if (!psnlim)
742 psnlim = &snlim_buf;
744 pointer_query empty_qry;
745 if (!qry)
746 qry = &empty_qry;
748 if (gimple *def_stmt = SSA_NAME_DEF_STMT (ref))
750 if (is_gimple_assign (def_stmt))
752 tree_code code = gimple_assign_rhs_code (def_stmt);
753 if (code != MIN_EXPR && code != MAX_EXPR)
754 return NULL_TREE;
756 access_ref aref;
757 tree arg1 = gimple_assign_rhs1 (def_stmt);
758 if (!aref.merge_ref (all_refs, arg1, def_stmt, ostype, false,
759 *psnlim, *qry))
760 return NULL_TREE;
762 tree arg2 = gimple_assign_rhs2 (def_stmt);
763 if (!aref.merge_ref (all_refs, arg2, def_stmt, ostype, false,
764 *psnlim, *qry))
765 return NULL_TREE;
767 if (pref && pref != this)
769 tree ref = pref->ref;
770 *pref = aref;
771 pref->ref = ref;
774 return aref.ref;
777 else
778 return NULL_TREE;
780 gphi *phi_stmt = this->phi ();
781 if (!phi_stmt)
782 return ref;
784 if (!psnlim->visit_phi (ref))
785 return NULL_TREE;
787 /* The conservative result of the PHI reflecting the offset and size
788 of the largest PHI argument, regardless of whether or not they all
789 refer to the same object. */
790 access_ref phi_ref;
791 if (pref)
793 /* The identity of the object has not been determined yet but
794 PREF->REF is set by the caller to the PHI for convenience.
795 The size is negative/invalid and the offset is zero (it's
796 updated only after the identity of the object has been
797 established). */
798 gcc_assert (pref->sizrng[0] < 0);
799 gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);
801 phi_ref = *pref;
804 const unsigned nargs = gimple_phi_num_args (phi_stmt);
805 for (unsigned i = 0; i < nargs; ++i)
807 access_ref phi_arg_ref;
808 bool skip_null = i || i + 1 < nargs;
809 tree arg = gimple_phi_arg_def (phi_stmt, i);
810 if (!phi_ref.merge_ref (all_refs, arg, phi_stmt, ostype, skip_null,
811 *psnlim, *qry))
812 return NULL_TREE;
815 if (phi_ref.sizrng[0] < 0)
817 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
818 (perhaps because they have all been already visited by prior
819 recursive calls). */
820 psnlim->leave_phi (ref);
821 return NULL_TREE;
824 /* Avoid changing *THIS. */
825 if (pref && pref != this)
827 /* Keep the SSA_NAME of the PHI unchanged so that all PHI arguments
828 can be referred to later if necessary. This is useful even if
829 they all refer to the same object. */
830 tree ref = pref->ref;
831 *pref = phi_ref;
832 pref->ref = ref;
835 psnlim->leave_phi (ref);
837 return phi_ref.ref;
840 /* Return the maximum amount of space remaining and if non-null, set
841 argument to the minimum. */
843 offset_int
844 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
846 offset_int minbuf;
847 if (!pmin)
848 pmin = &minbuf;
850 if (sizrng[0] < 0)
852 /* If the identity of the object hasn't been determined return
853 the maximum size range. */
854 *pmin = 0;
855 return wi::to_offset (max_object_size ());
858 /* add_offset() ensures the offset range isn't inverted. */
859 gcc_checking_assert (offrng[0] <= offrng[1]);
861 if (base0)
863 /* The offset into referenced object is zero-based (i.e., it's
864 not referenced by a pointer into middle of some unknown object). */
865 if (offrng[0] < 0 && offrng[1] < 0)
867 /* If the offset is negative the remaining size is zero. */
868 *pmin = 0;
869 return 0;
872 if (sizrng[1] <= offrng[0])
874 /* If the starting offset is greater than or equal to the upper
875 bound on the size of the object, the space remaining is zero.
876 As a special case, if it's equal, set *PMIN to -1 to let
877 the caller know the offset is valid and just past the end. */
878 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
879 return 0;
882 /* Otherwise return the size minus the lower bound of the offset. */
883 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
885 *pmin = sizrng[0] - or0;
886 return sizrng[1] - or0;
889 /* The offset to the referenced object isn't zero-based (i.e., it may
890 refer to a byte other than the first. The size of such an object
891 is constrained only by the size of the address space (the result
892 of max_object_size()). */
893 if (sizrng[1] <= offrng[0])
895 *pmin = 0;
896 return 0;
899 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
901 *pmin = sizrng[0] - or0;
902 return sizrng[1] - or0;
905 /* Return true if the offset and object size are in range for SIZE. */
907 bool
908 access_ref::offset_in_range (const offset_int &size) const
910 if (size_remaining () < size)
911 return false;
913 if (base0)
914 return offmax[0] >= 0 && offmax[1] <= sizrng[1];
916 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
917 return offmax[0] > -maxoff && offmax[1] < maxoff;
920 /* Add the range [MIN, MAX] to the offset range. For known objects (with
921 zero-based offsets) at least one of whose offset's bounds is in range,
922 constrain the other (or both) to the bounds of the object (i.e., zero
923 and the upper bound of its size). This improves the quality of
924 diagnostics. */
926 void access_ref::add_offset (const offset_int &min, const offset_int &max)
928 if (min <= max)
930 /* To add an ordinary range just add it to the bounds. */
931 offrng[0] += min;
932 offrng[1] += max;
934 else if (!base0)
936 /* To add an inverted range to an offset to an unknown object
937 expand it to the maximum. */
938 add_max_offset ();
939 return;
941 else
943 /* To add an inverted range to an offset to an known object set
944 the upper bound to the maximum representable offset value
945 (which may be greater than MAX_OBJECT_SIZE).
946 The lower bound is either the sum of the current offset and
947 MIN when abs(MAX) is greater than the former, or zero otherwise.
948 Zero because then then inverted range includes the negative of
949 the lower bound. */
950 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
951 offrng[1] = maxoff;
953 if (max >= 0)
955 offrng[0] = 0;
956 if (offmax[0] > 0)
957 offmax[0] = 0;
958 return;
961 offset_int absmax = wi::abs (max);
962 if (offrng[0] < absmax)
964 offrng[0] += min;
965 /* Cap the lower bound at the upper (set to MAXOFF above)
966 to avoid inadvertently recreating an inverted range. */
967 if (offrng[1] < offrng[0])
968 offrng[0] = offrng[1];
970 else
971 offrng[0] = 0;
974 /* Set the minimum and maximmum computed so far. */
975 if (offrng[1] < 0 && offrng[1] < offmax[0])
976 offmax[0] = offrng[1];
977 if (offrng[0] > 0 && offrng[0] > offmax[1])
978 offmax[1] = offrng[0];
980 if (!base0)
981 return;
983 /* When referencing a known object check to see if the offset computed
984 so far is in bounds... */
985 offset_int remrng[2];
986 remrng[1] = size_remaining (remrng);
987 if (remrng[1] > 0 || remrng[0] < 0)
989 /* ...if so, constrain it so that neither bound exceeds the size of
990 the object. Out of bounds offsets are left unchanged, and, for
991 better or worse, become in bounds later. They should be detected
992 and diagnosed at the point they first become invalid by
993 -Warray-bounds. */
994 if (offrng[0] < 0)
995 offrng[0] = 0;
996 if (offrng[1] > sizrng[1])
997 offrng[1] = sizrng[1];
1001 /* Issue one inform message describing each target of an access REF.
1002 WRITE is set for a write access and clear for a read access. */
1004 void
1005 access_ref::inform_access (access_mode mode, int ostype /* = 1 */) const
1007 const access_ref &aref = *this;
1008 if (!aref.ref)
1009 return;
1011 if (phi ())
1013 /* Set MAXREF to refer to the largest object and fill ALL_REFS
1014 with data for all objects referenced by the PHI arguments. */
1015 access_ref maxref;
1016 auto_vec<access_ref> all_refs;
1017 if (!get_ref (&all_refs, &maxref, ostype))
1018 return;
1020 if (all_refs.length ())
1022 /* Except for MAXREF, the rest of the arguments' offsets need not
1023 reflect one added to the PHI itself. Determine the latter from
1024 MAXREF on which the result is based. */
1025 const offset_int orng[] =
1027 offrng[0] - maxref.offrng[0],
1028 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
1031 /* Add the final PHI's offset to that of each of the arguments
1032 and recurse to issue an inform message for it. */
1033 for (unsigned i = 0; i != all_refs.length (); ++i)
1035 /* Skip any PHIs; those could lead to infinite recursion. */
1036 if (all_refs[i].phi ())
1037 continue;
1039 all_refs[i].add_offset (orng[0], orng[1]);
1040 all_refs[i].inform_access (mode, ostype);
1042 return;
1046 /* Convert offset range and avoid including a zero range since it
1047 isn't necessarily meaningful. */
1048 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
1049 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
1050 HOST_WIDE_INT minoff;
1051 HOST_WIDE_INT maxoff = diff_max;
1052 if (wi::fits_shwi_p (aref.offrng[0]))
1053 minoff = aref.offrng[0].to_shwi ();
1054 else
1055 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
1057 if (wi::fits_shwi_p (aref.offrng[1]))
1058 maxoff = aref.offrng[1].to_shwi ();
1060 if (maxoff <= diff_min || maxoff >= diff_max)
1061 /* Avoid mentioning an upper bound that's equal to or in excess
1062 of the maximum of ptrdiff_t. */
1063 maxoff = minoff;
1065 /* Convert size range and always include it since all sizes are
1066 meaningful. */
1067 unsigned long long minsize = 0, maxsize = 0;
1068 if (wi::fits_shwi_p (aref.sizrng[0])
1069 && wi::fits_shwi_p (aref.sizrng[1]))
1071 minsize = aref.sizrng[0].to_shwi ();
1072 maxsize = aref.sizrng[1].to_shwi ();
1075 /* SIZRNG doesn't necessarily have the same range as the allocation
1076 size determined by gimple_call_alloc_size (). */
1077 char sizestr[80];
1078 if (minsize == maxsize)
1079 sprintf (sizestr, "%llu", minsize);
1080 else
1081 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
1083 char offstr[80];
1084 if (minoff == 0
1085 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
1086 offstr[0] = '\0';
1087 else if (minoff == maxoff)
1088 sprintf (offstr, "%lli", (long long) minoff);
1089 else
1090 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
1092 location_t loc = UNKNOWN_LOCATION;
1094 tree ref = this->ref;
1095 tree allocfn = NULL_TREE;
1096 if (TREE_CODE (ref) == SSA_NAME)
1098 gimple *stmt = SSA_NAME_DEF_STMT (ref);
1099 if (!stmt)
1100 return;
1102 if (is_gimple_call (stmt))
1104 loc = gimple_location (stmt);
1105 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1107 /* Strip the SSA_NAME suffix from the variable name and
1108 recreate an identifier with the VLA's original name. */
1109 ref = gimple_call_lhs (stmt);
1110 if (SSA_NAME_IDENTIFIER (ref))
1112 ref = SSA_NAME_IDENTIFIER (ref);
1113 const char *id = IDENTIFIER_POINTER (ref);
1114 size_t len = strcspn (id, ".$");
1115 if (!len)
1116 len = strlen (id);
1117 ref = get_identifier_with_length (id, len);
1120 else
1122 /* Except for VLAs, retrieve the allocation function. */
1123 allocfn = gimple_call_fndecl (stmt);
1124 if (!allocfn)
1125 allocfn = gimple_call_fn (stmt);
1126 if (TREE_CODE (allocfn) == SSA_NAME)
1128 /* For an ALLOC_CALL via a function pointer make a small
1129 effort to determine the destination of the pointer. */
1130 gimple *def = SSA_NAME_DEF_STMT (allocfn);
1131 if (gimple_assign_single_p (def))
1133 tree rhs = gimple_assign_rhs1 (def);
1134 if (DECL_P (rhs))
1135 allocfn = rhs;
1136 else if (TREE_CODE (rhs) == COMPONENT_REF)
1137 allocfn = TREE_OPERAND (rhs, 1);
1142 else if (gimple_nop_p (stmt))
1143 /* Handle DECL_PARM below. */
1144 ref = SSA_NAME_VAR (ref);
1145 else if (is_gimple_assign (stmt)
1146 && (gimple_assign_rhs_code (stmt) == MIN_EXPR
1147 || gimple_assign_rhs_code (stmt) == MAX_EXPR))
1149 /* MIN or MAX_EXPR here implies a reference to a known object
1150 and either an unknown or distinct one (the latter being
1151 the result of an invalid relational expression). Determine
1152 the identity of the former and point to it in the note.
1153 TODO: Consider merging with PHI handling. */
1154 access_ref arg_ref[2];
1155 tree arg = gimple_assign_rhs1 (stmt);
1156 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
1157 arg = gimple_assign_rhs2 (stmt);
1158 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);
1160 /* Use the argument that references a known object with more
1161 space remaining. */
1162 const bool idx
1163 = (!arg_ref[0].ref || !arg_ref[0].base0
1164 || (arg_ref[0].base0 && arg_ref[1].base0
1165 && (arg_ref[0].size_remaining ()
1166 < arg_ref[1].size_remaining ())));
1168 arg_ref[idx].offrng[0] = offrng[0];
1169 arg_ref[idx].offrng[1] = offrng[1];
1170 arg_ref[idx].inform_access (mode);
1171 return;
1175 if (DECL_P (ref))
1176 loc = DECL_SOURCE_LOCATION (ref);
1177 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
1178 loc = EXPR_LOCATION (ref);
1179 else if (TREE_CODE (ref) != IDENTIFIER_NODE
1180 && TREE_CODE (ref) != SSA_NAME)
1181 return;
1183 if (mode == access_read_write || mode == access_write_only)
1185 if (allocfn == NULL_TREE)
1187 if (*offstr)
1188 inform (loc, "at offset %s into destination object %qE of size %s",
1189 offstr, ref, sizestr);
1190 else
1191 inform (loc, "destination object %qE of size %s", ref, sizestr);
1192 return;
1195 if (*offstr)
1196 inform (loc,
1197 "at offset %s into destination object of size %s "
1198 "allocated by %qE", offstr, sizestr, allocfn);
1199 else
1200 inform (loc, "destination object of size %s allocated by %qE",
1201 sizestr, allocfn);
1202 return;
1205 if (mode == access_read_only)
1207 if (allocfn == NULL_TREE)
1209 if (*offstr)
1210 inform (loc, "at offset %s into source object %qE of size %s",
1211 offstr, ref, sizestr);
1212 else
1213 inform (loc, "source object %qE of size %s", ref, sizestr);
1215 return;
1218 if (*offstr)
1219 inform (loc,
1220 "at offset %s into source object of size %s allocated by %qE",
1221 offstr, sizestr, allocfn);
1222 else
1223 inform (loc, "source object of size %s allocated by %qE",
1224 sizestr, allocfn);
1225 return;
1228 if (allocfn == NULL_TREE)
1230 if (*offstr)
1231 inform (loc, "at offset %s into object %qE of size %s",
1232 offstr, ref, sizestr);
1233 else
1234 inform (loc, "object %qE of size %s", ref, sizestr);
1236 return;
1239 if (*offstr)
1240 inform (loc,
1241 "at offset %s into object of size %s allocated by %qE",
1242 offstr, sizestr, allocfn);
1243 else
1244 inform (loc, "object of size %s allocated by %qE",
1245 sizestr, allocfn);
/* Dump *THIS to FILE.  Prints the dereference/address-of depth, the
   reference (or the arguments of its defining PHI), the offset range,
   the BASE0 flag, and the size range.  For debugging only.  */

void
access_ref::dump (FILE *file) const
{
  /* A negative DEREF means the address of the reference was taken
     that many times; print one '&' per level.  */
  for (int i = deref; i < 0; ++i)
    fputc ('&', file);

  /* A positive DEREF is the number of dereferences; one '*' each.  */
  for (int i = 0; i < deref; ++i)
    fputc ('*', file);

  if (gphi *phi_stmt = phi ())
    {
      /* REF is defined by a PHI; list all its arguments.  */
      fputs ("PHI <", file);
      unsigned nargs = gimple_phi_num_args (phi_stmt);
      for (unsigned i = 0; i != nargs; ++i)
	{
	  tree arg = gimple_phi_arg_def (phi_stmt, i);
	  print_generic_expr (file, arg);
	  if (i + 1 < nargs)
	    fputs (", ", file);
	}
      fputc ('>', file);
    }
  else
    print_generic_expr (file, ref);

  if (offrng[0] != offrng[1])
    fprintf (file, " + [%lli, %lli]",
	     (long long) offrng[0].to_shwi (),
	     (long long) offrng[1].to_shwi ());
  else if (offrng[0] != 0)
    /* NOTE(review): for a negative constant offset this prints both
       the '-' sign and the negative value (e.g., "- -4"); confirm
       whether the absolute value was intended.  */
    fprintf (file, " %c %lli",
	     offrng[0] < 0 ? '-' : '+',
	     (long long) offrng[0].to_shwi ());

  if (base0)
    fputs (" (base0)", file);

  fputs ("; size: ", file);
  if (sizrng[0] != sizrng[1])
    {
      offset_int maxsize = wi::to_offset (max_object_size ());
      /* A range of [0, max] conveys no information.  */
      if (sizrng[0] == 0 && sizrng[1] >= maxsize)
	fputs ("unknown", file);
      else
	fprintf (file, "[%llu, %llu]",
		 (unsigned long long) sizrng[0].to_uhwi (),
		 (unsigned long long) sizrng[1].to_uhwi ());
    }
  else if (sizrng[0] != 0)
    fprintf (file, "%llu",
	     (unsigned long long) sizrng[0].to_uhwi ());

  fputc ('\n', file);
}
/* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
   when MINWRITE or MINREAD, respectively, is set.  STMT is the statement
   performing the access, used to obtain context-sensitive ranges from
   QUERY.  */

access_data::access_data (range_query *query, gimple *stmt, access_mode mode,
			  tree maxwrite /* = NULL_TREE */,
			  bool minwrite /* = false */,
			  tree maxread /* = NULL_TREE */,
			  bool minread /* = false */)
  : stmt (stmt), call (), dst (), src (), mode (mode), ostype ()
{
  /* Convert the optional bound expressions to offset ranges.  */
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}
/* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
   when MINWRITE or MINREAD, respectively, is set.  EXPR is the call
   expression performing the access; no GIMPLE statement is available.  */

access_data::access_data (range_query *query, tree expr, access_mode mode,
			  tree maxwrite /* = NULL_TREE */,
			  bool minwrite /* = false */,
			  tree maxread /* = NULL_TREE */,
			  bool minread /* = false */)
  : stmt (), call (expr), dst (), src (), mode (mode), ostype ()
{
  /* The STMT member was value-initialized to null above, so SET_BOUND
     is deliberately invoked with a null statement here (ranges are
     then determined without statement context).  */
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}
/* Set BNDRNG to the range of BOUND for the statement STMT.  BNDRNG[0]
   reflects only whether a minimum access of one byte is required
   (MINACCESS); BNDRNG[1] is the upper bound of the access.  */

void
access_data::set_bound (offset_int bndrng[2], tree bound, bool minaccess,
			range_query *query, gimple *stmt)
{
  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (query, bound, stmt, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      /* NOTE(review): this overwrites the lower bound just computed
	 from RNG[0], collapsing it to 1 (when a minimum access is
	 required and the bound can be positive) or 0 — presumably to
	 avoid overly aggressive lower bounds in diagnostics; confirm
	 the computed BNDRNG[0] was not meant to be kept.  */
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}
1353 /* Set a bit for the PHI in VISITED and return true if it wasn't
1354 already set. */
1356 bool
1357 ssa_name_limit_t::visit_phi (tree ssa_name)
1359 if (!visited)
1360 visited = BITMAP_ALLOC (NULL);
1362 /* Return false if SSA_NAME has already been visited. */
1363 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
/* Clear a bit for the PHI in VISITED.  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* Undo the corresponding visit_phi so the PHI can be visited again
     along another path.  VISITED must have been allocated by a prior
     call to visit_phi.  (The previous comment here, "Return false if
     SSA_NAME has already been visited," was copy-pasted from
     visit_phi and did not apply to this void function.)  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}
1375 /* Return false if the SSA_NAME chain length counter has reached
1376 the limit, otherwise increment the counter and return true. */
1378 bool
1379 ssa_name_limit_t::next ()
1381 /* Return a negative value to let caller avoid recursing beyond
1382 the specified limit. */
1383 if (ssa_def_max == 0)
1384 return false;
1386 --ssa_def_max;
1387 return true;
/* If the SSA_NAME has already been "seen" return a positive value.
   Otherwise add it to VISITED.  If the SSA_NAME limit has been
   reached, return a negative value.  Otherwise return zero.  */

int
ssa_name_limit_t::next_phi (tree ssa_name)
{
  {
    gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
    /* Return a positive value if the PHI has already been visited.
       visit_phi returns false exactly when the bit was already set.  */
    if (gimple_code (def_stmt) == GIMPLE_PHI
	&& !visit_phi (ssa_name))
      return 1;
  }

  /* Return a negative value to let caller avoid recursing beyond
     the specified limit.  (Same budget check as in next ().)  */
  if (ssa_def_max == 0)
    return -1;

  --ssa_def_max;

  return 0;
}
/* Release the PHI visited-set bitmap if one was allocated by
   visit_phi.  */

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}
/* Default ctor.  Initialize object with pointers to the range_query
   and cache_type instances to use or null.  All statistics counters
   (hits, misses, failures, depth, max_depth) start at zero.  */

pointer_query::pointer_query (range_query *qry /* = NULL */,
			      cache_type *cache /* = NULL */)
  : rvals (qry), var_cache (cache), hits (), misses (),
    failures (), depth (), max_depth ()
{
  /* No op.  */
}
1432 /* Return a pointer to the cached access_ref instance for the SSA_NAME
1433 PTR if it's there or null otherwise. */
1435 const access_ref *
1436 pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1438 if (!var_cache)
1440 ++misses;
1441 return NULL;
1444 unsigned version = SSA_NAME_VERSION (ptr);
1445 unsigned idx = version << 1 | (ostype & 1);
1446 if (var_cache->indices.length () <= idx)
1448 ++misses;
1449 return NULL;
1452 unsigned cache_idx = var_cache->indices[idx];
1453 if (var_cache->access_refs.length () <= cache_idx)
1455 ++misses;
1456 return NULL;
1459 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1460 if (cache_ref.ref)
1462 ++hits;
1463 return &cache_ref;
1466 ++misses;
1467 return NULL;
1470 /* Retrieve the access_ref instance for a variable from the cache if it's
1471 there or compute it and insert it into the cache if it's nonnonull. */
1473 bool
1474 pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref,
1475 int ostype /* = 1 */)
1477 const unsigned version
1478 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1480 if (var_cache && version)
1482 unsigned idx = version << 1 | (ostype & 1);
1483 if (idx < var_cache->indices.length ())
1485 unsigned cache_idx = var_cache->indices[idx] - 1;
1486 if (cache_idx < var_cache->access_refs.length ()
1487 && var_cache->access_refs[cache_idx].ref)
1489 ++hits;
1490 *pref = var_cache->access_refs[cache_idx];
1491 return true;
1495 ++misses;
1498 if (!compute_objsize (ptr, stmt, ostype, pref, this))
1500 ++failures;
1501 return false;
1504 return true;
/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
   nonnull.  */

void
pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
{
  /* Only add populated/valid entries.  */
  if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
    return;

  /* Add REF to the two-level cache.  */
  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);

  /* Grow INDICES if necessary.  An index is valid if it's nonzero.
     Entries are recorded as ACCESS_REFS.length () + 1 and the value
     is used directly as the position in ACCESS_REFS below, so
     position zero there is never occupied.  Not all entries are
     valid.  (An earlier comment here said the value minus one is
     the index, which did not match the code below.)  */
  if (var_cache->indices.length () <= idx)
    var_cache->indices.safe_grow_cleared (idx + 1);

  if (!var_cache->indices[idx])
    var_cache->indices[idx] = var_cache->access_refs.length () + 1;

  /* Grow ACCESS_REF cache if necessary.  An entry is valid if its
     REF member is nonnull.  Once nonnull, the REF value must stay
     unchanged.  */
  unsigned cache_idx = var_cache->indices[idx];
  if (var_cache->access_refs.length () <= cache_idx)
    var_cache->access_refs.safe_grow_cleared (cache_idx + 1);

  access_ref &cache_ref = var_cache->access_refs[cache_idx];
  if (cache_ref.ref)
    {
      /* The slot is already populated; it must describe the same
	 object.  */
      gcc_checking_assert (cache_ref.ref == ref.ref);
      return;
    }

  cache_ref = ref;
}
1547 /* Flush the cache if it's nonnull. */
1549 void
1550 pointer_query::flush_cache ()
1552 if (!var_cache)
1553 return;
1554 var_cache->indices.release ();
1555 var_cache->access_refs.release ();
/* Dump statistics and, optionally, cache contents to DUMP_FILE.
   For debugging only; does nothing when no cache is in use.  */

void
pointer_query::dump (FILE *dump_file, bool contents /* = false */)
{
  if (!var_cache)
    return;

  /* Count the populated level-1 indices (NUSED) and the level-2
     entries with a valid (nonnull) REF (NREFS).  */
  unsigned nused = 0, nrefs = 0;
  unsigned nidxs = var_cache->indices.length ();
  for (unsigned i = 0; i != nidxs; ++i)
    {
      unsigned ari = var_cache->indices[i];
      if (!ari)
	continue;

      ++nused;

      /* NOTE(review): ACCESS_REFS is indexed with the raw INDICES
	 value without a bounds check; this relies on put_ref having
	 grown the vector accordingly — confirm.  */
      const access_ref &aref = var_cache->access_refs[ari];
      if (!aref.ref)
	continue;

      ++nrefs;
    }

  fprintf (dump_file, "pointer_query counters:\n"
	   " index cache size: %u\n"
	   " index entries: %u\n"
	   " access cache size: %u\n"
	   " access entries: %u\n"
	   " hits: %u\n"
	   " misses: %u\n"
	   " failures: %u\n"
	   " max_depth: %u\n",
	   nidxs, nused,
	   var_cache->access_refs.length (), nrefs,
	   hits, misses, failures, max_depth);

  if (!contents || !nidxs)
    return;

  fputs ("\npointer_query cache contents:\n", dump_file);

  for (unsigned i = 0; i != nidxs; ++i)
    {
      unsigned ari = var_cache->indices[i];
      if (!ari)
	continue;

      const access_ref &aref = var_cache->access_refs[ari];
      if (!aref.ref)
	continue;

      /* The level-1 cache index corresponds to the SSA_NAME_VERSION
	 shifted left by one and ORed with the Object Size Type in
	 the lowest bit.  Print the two separately.  */
      unsigned ver = i >> 1;
      unsigned ost = i & 1;

      fprintf (dump_file, " %u.%u[%u]: ", ver, ost, ari);
      if (tree name = ssa_name (ver))
	{
	  print_generic_expr (dump_file, name);
	  fputs (" = ", dump_file);
	}
      else
	/* The version no longer maps to an SSA_NAME.  */
	fprintf (dump_file, " _%u = ", ver);

      aref.dump (dump_file);
    }

  fputc ('\n', dump_file);

  /* Second listing: walk all pointer SSA_NAMEs in the current
     function and dump whatever the cache has for each, for both
     Object Size Types.  */
  fputs ("\npointer_query cache contents (again):\n", dump_file);

  tree var;
  unsigned i;
  FOR_EACH_SSA_NAME (i, var, cfun)
    {
      if (TREE_CODE (TREE_TYPE (var)) != POINTER_TYPE)
	continue;

      for (unsigned ost = 0; ost != 2; ++ost)
	{
	  if (const access_ref *cache_ref = get_ref (var, ost))
	    {
	      unsigned ver = SSA_NAME_VERSION (var);
	      fprintf (dump_file, " %u.%u: ", ver, ost);
	      if (tree name = ssa_name (ver))
		{
		  print_generic_expr (dump_file, name);
		  fputs (" = ", dump_file);
		}
	      else
		fprintf (dump_file, " _%u = ", ver);

	      cache_ref->dump (dump_file);
	    }
	}
    }
}
/* A helper of compute_objsize_r() to determine the size from an assignment
   statement STMT with the RHS of either MIN_EXPR or MAX_EXPR.  On success
   set PREF->REF to the operand with more or less space remaining,
   respectively, if both refer to the same (sub)object, or to PTR if they
   might not, and return true.  Otherwise, if the identity of neither
   operand can be determined, return false.  */

static bool
handle_min_max_size (tree ptr, int ostype, access_ref *pref,
		     ssa_name_limit_t &snlim, pointer_query *qry)
{
  gimple *stmt = SSA_NAME_DEF_STMT (ptr);
  const tree_code code = gimple_assign_rhs_code (stmt);

  /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
     Determine the size/offset of each and use the one with more or less
     space remaining, respectively.  If either fails, use the information
     determined from the other instead, adjusted up or down as appropriate
     for the expression.  */
  access_ref aref[2] = { *pref, *pref };
  tree arg1 = gimple_assign_rhs1 (stmt);
  if (!compute_objsize_r (arg1, stmt, false, ostype, &aref[0], snlim, qry))
    {
      /* The first operand's identity is unknown: use conservative
	 maximum size and offset ranges for it.  */
      aref[0].base0 = false;
      aref[0].offrng[0] = aref[0].offrng[1] = 0;
      aref[0].add_max_offset ();
      aref[0].set_max_size_range ();
    }

  tree arg2 = gimple_assign_rhs2 (stmt);
  if (!compute_objsize_r (arg2, stmt, false, ostype, &aref[1], snlim, qry))
    {
      /* Likewise for the second operand.  */
      aref[1].base0 = false;
      aref[1].offrng[0] = aref[1].offrng[1] = 0;
      aref[1].add_max_offset ();
      aref[1].set_max_size_range ();
    }

  if (!aref[0].ref && !aref[1].ref)
    /* Fail if the identity of neither argument could be determined.  */
    return false;

  /* I0 selects which of AREF[] is treated as the primary operand
     below.  */
  bool i0 = false;
  if (aref[0].ref && aref[0].base0)
    {
      if (aref[1].ref && aref[1].base0)
	{
	  /* If the object referenced by both arguments has been determined
	     set *PREF to the one with more or less space remaining,
	     whichever is appropriate for CODE.
	     TODO: Consider merging with PHI handling.
	     TODO: Indicate when the objects are distinct so it can be
	     diagnosed.  */
	  i0 = code == MAX_EXPR;
	  const bool i1 = !i0;

	  if (aref[i0].size_remaining () < aref[i1].size_remaining ())
	    *pref = aref[i1];
	  else
	    *pref = aref[i0];

	  if (aref[i0].ref != aref[i1].ref)
	    /* If the operands don't refer to the same (sub)object set
	       PREF->REF to the SSA_NAME from which STMT was obtained
	       so that both can be identified in a diagnostic.  */
	    pref->ref = ptr;

	  return true;
	}

      /* If only the object referenced by one of the arguments could be
	 determined, use it and...  */
      *pref = aref[0];
      i0 = true;
    }
  else
    *pref = aref[1];

  const bool i1 = !i0;
  /* ...see if the offset obtained from the other pointer can be used
     to tighten up the bound on the offset obtained from the first.  */
  if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
      || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
    {
      pref->offrng[0] = aref[i0].offrng[0];
      pref->offrng[1] = aref[i0].offrng[1];
    }

  /* Replace PTR->REF with the SSA_NAME to indicate the expression
     might not refer to the same (sub)object.  */
  pref->ref = ptr;
  return true;
}
1755 /* A helper of compute_objsize_r() to determine the size of a DECL.
1756 Return true on success and (possibly in the future) false on failure. */
1758 static bool
1759 handle_decl (tree decl, bool addr, access_ref *pref)
1761 tree decl_type = TREE_TYPE (decl);
1763 pref->ref = decl;
1765 /* Reset the offset in case it was set by a prior call and not
1766 cleared by the caller. The offset is only adjusted after
1767 the identity of the object has been determined. */
1768 pref->offrng[0] = pref->offrng[1] = 0;
1770 if (!addr && POINTER_TYPE_P (decl_type))
1772 /* Set the maximum size if the reference is to the pointer
1773 itself (as opposed to what it points to), and clear
1774 BASE0 since the offset isn't necessarily zero-based. */
1775 pref->set_max_size_range ();
1776 pref->base0 = false;
1777 return true;
1780 /* Valid offsets into the object are nonnegative. */
1781 pref->base0 = true;
1783 if (tree size = decl_init_size (decl, false))
1784 if (TREE_CODE (size) == INTEGER_CST)
1786 pref->sizrng[0] = wi::to_offset (size);
1787 pref->sizrng[1] = pref->sizrng[0];
1788 return true;
1791 pref->set_max_size_range ();
1792 return true;
/* A helper of compute_objsize_r() to determine the size from ARRAY_REF
   AREF.  ADDR is true if PTR is the operand of ADDR_EXPR.  Return true
   on success and false on failure.  */

static bool
handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype,
		  access_ref *pref, ssa_name_limit_t &snlim,
		  pointer_query *qry)
{
  gcc_assert (TREE_CODE (aref) == ARRAY_REF);

  tree arefop = TREE_OPERAND (aref, 0);
  tree reftype = TREE_TYPE (arefop);
  if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
    /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
       of known bound.  */
    return false;

  /* Determine the size and offset of the enclosing object first.  */
  if (!compute_objsize_r (arefop, stmt, addr, ostype, pref, snlim, qry))
    return false;

  /* Determine the range of the array index.  */
  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (aref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, stmt, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  /* Convert the array index range determined above to a byte
     offset.  */
  tree lowbnd = array_ref_low_bound (aref);
  if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
    {
      /* Adjust the index by the low bound of the array domain
	 (normally zero but 1 in Fortran).  */
      unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
      orng[0] -= lb;
      orng[1] -= lb;
    }

  tree eltype = TREE_TYPE (aref);
  tree tpsize = TYPE_SIZE_UNIT (eltype);
  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
    {
      /* The element size is not a known constant; give up on
	 a precise offset but keep the object size.  */
      pref->add_max_offset ();
      return true;
    }

  /* Scale the index range by the element size to get byte offsets.  */
  offset_int sz = wi::to_offset (tpsize);
  orng[0] *= sz;
  orng[1] *= sz;

  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
    {
      /* Except for the permissive raw memory functions which use
	 the size of the whole object determined above, use the size
	 of the referenced array.  Because the overall offset is from
	 the beginning of the complete array object add this overall
	 offset to the size of array.  */
      offset_int sizrng[2] =
	{
	  pref->offrng[0] + orng[0] + sz,
	  pref->offrng[1] + orng[1] + sz
	};
      if (sizrng[1] < sizrng[0])
	std::swap (sizrng[0], sizrng[1]);
      /* Only tighten the size bounds; never grow them.  */
      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
	pref->sizrng[0] = sizrng[0];
      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
	pref->sizrng[1] = sizrng[1];
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
/* Given a COMPONENT_REF CREF, set *PREF size to the size of the referenced
   member.  */

static void
set_component_ref_size (tree cref, access_ref *pref)
{
  const tree base = TREE_OPERAND (cref, 0);
  const tree base_type = TREE_TYPE (base);

  /* SAM is set for array members that might need special treatment.  */
  special_array_member sam;
  tree size = component_ref_size (cref, &sam);
  if (sam == special_array_member::int_0)
    /* Presumably an interior zero-length array member: treat its
       size as zero — confirm against special_array_member docs.  */
    pref->sizrng[0] = pref->sizrng[1] = 0;
  else if (!pref->trail1special && sam == special_array_member::trail_1)
    /* A trailing one-element array, unless the caller asked for
       the special (pre-flexible-array-member) treatment.  */
    pref->sizrng[0] = pref->sizrng[1] = 1;
  else if (size && TREE_CODE (size) == INTEGER_CST)
    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
  else
    {
      /* When the size of the member is unknown it's either a flexible
	 array member or a trailing special array member (either zero
	 length or one-element).  Set the size to the maximum minus
	 the constant size of the base object's type.  */
      pref->sizrng[0] = 0;
      pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      if (tree base_size = TYPE_SIZE_UNIT (base_type))
	if (TREE_CODE (base_size) == INTEGER_CST)
	  pref->sizrng[1] -= wi::to_offset (base_size);
    }
}
/* A helper of compute_objsize_r() to determine the size from COMPONENT_REF
   CREF.  Return true on success and false on failure.  */

static bool
handle_component_ref (tree cref, gimple *stmt, bool addr, int ostype,
		      access_ref *pref, ssa_name_limit_t &snlim,
		      pointer_query *qry)
{
  gcc_assert (TREE_CODE (cref) == COMPONENT_REF);

  const tree base = TREE_OPERAND (cref, 0);
  const tree base_type = TREE_TYPE (base);
  if (TREE_CODE (base_type) == UNION_TYPE)
    /* In accesses through union types consider the entire unions
       rather than just their members.  */
    ostype = 0;

  tree field = TREE_OPERAND (cref, 1);

  if (ostype == 0)
    {
      /* In OSTYPE zero (for raw memory functions like memcpy), use
	 the maximum size instead if the identity of the enclosing
	 object cannot be determined.  */
      if (!compute_objsize_r (base, stmt, addr, ostype, pref, snlim, qry))
	return false;

      /* Otherwise, use the size of the enclosing object and add
	 the offset of the member to the offset computed so far.  */
      tree offset = byte_position (field);
      if (TREE_CODE (offset) == INTEGER_CST)
	pref->add_offset (wi::to_offset (offset));
      else
	pref->add_max_offset ();

      if (!pref->ref)
	/* PREF->REF may have been already set to an SSA_NAME earlier
	   to provide better context for diagnostics.  In that case,
	   leave it unchanged.  */
	pref->ref = base;

      return true;
    }

  /* In OSTYPE one and above the reference is to the member itself.  */
  pref->ref = field;

  if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
    {
      /* Set maximum size if the reference is to the pointer member
	 itself (as opposed to what it points to).  */
      pref->set_max_size_range ();
      return true;
    }

  set_component_ref_size (cref, pref);
  return true;
}
/* A helper of compute_objsize_r() to determine the size from MEM_REF
   MREF.  Return true on success and false on failure.  */

static bool
handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref,
		ssa_name_limit_t &snlim, pointer_query *qry)
{
  gcc_assert (TREE_CODE (mref) == MEM_REF);

  tree mreftype = TYPE_MAIN_VARIANT (TREE_TYPE (mref));
  if (VECTOR_TYPE_P (mreftype))
    {
      /* Hack: Handle MEM_REFs of vector types as those to complete
	 objects; those may be synthesized from multiple assignments
	 to consecutive data members (see PR 93200 and 96963).
	 FIXME: Vectorized assignments should only be present after
	 vectorization so this hack is only necessary after it has
	 run and could be avoided in calls from prior passes (e.g.,
	 tree-ssa-strlen.c).
	 FIXME: Deal with this more generally, e.g., by marking up
	 such MEM_REFs at the time they're created.  */
      ostype = 0;
    }

  /* Determine the size and offset of the dereferenced pointer.  */
  tree mrefop = TREE_OPERAND (mref, 0);
  if (!compute_objsize_r (mrefop, stmt, false, ostype, pref, snlim, qry))
    return false;

  /* A MEM_REF adds one level of indirection.  */
  ++pref->deref;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (mref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, stmt, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
/* A helper of compute_objsize_r() to determine the size from SSA_NAME
   PTR by examining its defining statement (allocation call, function
   parameter, PHI, or assignment).  Return true on success and false
   on failure.  */

static bool
handle_ssa_name (tree ptr, bool addr, int ostype,
		 access_ref *pref, ssa_name_limit_t &snlim,
		 pointer_query *qry)
{
  if (!snlim.next ())
    return false;

  /* Only process an SSA_NAME if the recursion limit has not yet
     been reached.  */
  if (qry)
    {
      /* Track the deepest recursion for the statistics dump.  */
      if (++qry->depth > qry->max_depth)
	qry->max_depth = qry->depth;
      if (const access_ref *cache_ref = qry->get_ref (ptr, ostype))
	{
	  /* Add the number of DEREFerences accumulated so far.  */
	  const int deref = pref->deref;
	  *pref = *cache_ref;
	  pref->deref += deref;
	  return true;
	}
    }

  /* NOTE(review): QRY is null-checked above and wherever RVALS is
     obtained below, yet the qry->put_ref () calls throughout this
     function are unguarded.  Either QRY can never be null on those
     paths (confirm at the callers of compute_objsize_r) or these are
     potential null dereferences.  */
  gimple *stmt = SSA_NAME_DEF_STMT (ptr);
  if (is_gimple_call (stmt))
    {
      /* If STMT is a call to an allocation function get the size
	 from its argument(s).  If successful, also set *PREF->REF
	 to PTR for the caller to include in diagnostics.  */
      wide_int wr[2];
      range_query *const rvals = qry ? qry->rvals : NULL;
      if (gimple_call_alloc_size (stmt, wr, rvals))
	{
	  pref->ref = ptr;
	  pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	  pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	  /* Constrain both bounds to a valid size.  */
	  offset_int maxsize = wi::to_offset (max_object_size ());
	  if (pref->sizrng[0] > maxsize)
	    pref->sizrng[0] = maxsize;
	  if (pref->sizrng[1] > maxsize)
	    pref->sizrng[1] = maxsize;
	}
      else
	{
	  /* For functions known to return one of their pointer arguments
	     try to determine what the returned pointer points to, and on
	     success add OFFRNG which was set to the offset added by
	     the function (e.g., memchr) to the overall offset.  */
	  bool past_end;
	  offset_int offrng[2];
	  if (tree ret = gimple_call_return_array (stmt, offrng, &past_end,
						   snlim, qry))
	    {
	      if (!compute_objsize_r (ret, stmt, addr, ostype, pref, snlim,
				      qry))
		return false;

	      /* Cap OFFRNG[1] to at most the remaining size of
		 the object.  */
	      offset_int remrng[2];
	      remrng[1] = pref->size_remaining (remrng);
	      if (remrng[1] != 0 && !past_end)
		/* Decrement the size for functions that never return
		   a past-the-end pointer.  */
		remrng[1] -= 1;

	      if (remrng[1] < offrng[1])
		offrng[1] = remrng[1];
	      pref->add_offset (offrng[0], offrng[1]);
	    }
	  else
	    {
	      /* For other calls that might return arbitrary pointers
		 including into the middle of objects set the size
		 range to maximum, clear PREF->BASE0, and also set
		 PREF->REF to include in diagnostics.  */
	      pref->set_max_size_range ();
	      pref->base0 = false;
	      pref->ref = ptr;
	    }
	}
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (gimple_nop_p (stmt))
    {
      /* PTR is a function parameter: try to determine the byte size
	 of the array from the current function declaration
	 (e.g., attribute access or related).  */
      wide_int wr[2];
      bool static_array = false;
      if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
	{
	  pref->parmarray = !static_array;
	  pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
	  pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
	  pref->ref = ref;
	  qry->put_ref (ptr, *pref, ostype);
	  return true;
	}

      /* Otherwise the parameter may point anywhere.  */
      pref->set_max_size_range ();
      pref->base0 = false;
      pref->ref = ptr;
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (gimple_code (stmt) == GIMPLE_PHI)
    {
      /* Pass PTR to get_ref() via PREF.  If all PHI arguments refer
	 to the same object the function will replace it with it.  */
      pref->ref = ptr;
      access_ref phi_ref = *pref;
      if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
	return false;
      *pref = phi_ref;
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (!is_gimple_assign (stmt))
    {
      /* Clear BASE0 since the assigned pointer might point into
	 the middle of the object, set the maximum size range and,
	 if the SSA_NAME refers to a function argument, set
	 PREF->REF to it.  */
      pref->base0 = false;
      pref->set_max_size_range ();
      pref->ref = ptr;
      return true;
    }

  tree_code code = gimple_assign_rhs_code (stmt);

  if (code == MAX_EXPR || code == MIN_EXPR)
    {
      if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
	return false;

      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  tree rhs = gimple_assign_rhs1 (stmt);

  if (code == ASSERT_EXPR)
    {
      /* Look through the asserted name.  */
      rhs = TREE_OPERAND (rhs, 0);
      return compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry);
    }

  if (code == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
    {
      /* Compute the size of the object first.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;

      /* Then add the offset operand's range (or the maximum when it
	 cannot be determined).  */
      offset_int orng[2];
      tree off = gimple_assign_rhs2 (stmt);
      range_query *const rvals = qry ? qry->rvals : NULL;
      if (get_offset_range (off, stmt, orng, rvals))
	pref->add_offset (orng[0], orng[1]);
      else
	pref->add_max_offset ();

      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (code == ADDR_EXPR || code == SSA_NAME)
    {
      /* A plain copy or address: recurse into the operand.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;
      qry->put_ref (ptr, *pref, ostype);
      return true;
    }

  if (ostype > 1 && POINTER_TYPE_P (TREE_TYPE (rhs)))
    {
      /* When determining the qualifiers follow the pointer but
	 avoid caching the result.  As the pointer is added to
	 and/or dereferenced the computed size and offset need
	 not be meaningful for other queries involving the same
	 pointer.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;

      rhs = pref->ref;
    }

  /* (This could also be an assignment from a nonlocal pointer.)  Save
     PTR to mention in diagnostics but otherwise treat it as a pointer
     to an unknown object.  */
  pref->ref = rhs;
  pref->base0 = false;
  pref->set_max_size_range ();
  return true;
}
2214 /* Helper to compute the size of the object referenced by the PTR
2215 expression which must have pointer type, using Object Size type
2216 OSTYPE (only the least significant 2 bits are used).
2217 On success, sets PREF->REF to the DECL of the referenced object
2218 if it's unique, otherwise to null, PREF->OFFRNG to the range of
2219 offsets into it, and PREF->SIZRNG to the range of sizes of
2220 the object(s).
2221 ADDR is true for an enclosing ADDR_EXPR.
   SNLIM is used to avoid visiting the same PHI operand multiple
   times, and QRY, when nonnull, to determine range information.
2224 Returns true on success, false when a meaningful size (or range)
2225 cannot be determined.
2227 The function is intended for diagnostics and should not be used
2228 to influence code generation or optimization. */
static bool
compute_objsize_r (tree ptr, gimple *stmt, bool addr, int ostype,
		   access_ref *pref, ssa_name_limit_t &snlim,
		   pointer_query *qry)
{
  STRIP_NOPS (ptr);

  /* Declarations are handled uniformly regardless of expression shape.  */
  if (DECL_P (ptr))
    return handle_decl (ptr, addr, pref);

  switch (TREE_CODE (ptr))
    {
    case ADDR_EXPR:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	/* Recurse into the operand with ADDR set: the address of the
	   object is being taken, not its value.  */
	if (!compute_objsize_r (ref, stmt, true, ostype, pref, snlim, qry))
	  return false;

	/* Taking the address undoes one level of dereferencing.  */
	--pref->deref;
	return true;
      }

    case BIT_FIELD_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* Operand 2 is the bit position of the field; convert it to
	   a byte offset into the underlying object.  */
	offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
	pref->add_offset (off / BITS_PER_UNIT);
	return true;
      }

    case ARRAY_REF:
      return handle_array_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case COMPONENT_REF:
      return handle_component_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case MEM_REF:
      return handle_mem_ref (ptr, stmt, ostype, pref, snlim, qry);

    case TARGET_MEM_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* TODO: Handle remaining operands.  Until then, add maximum offset.  */
	pref->ref = ptr;
	pref->add_max_offset ();
	return true;
      }

    case INTEGER_CST:
      /* Pointer constants other than null are most likely the result
	 of erroneous null pointer addition/subtraction.  Unless zero
	 is a valid address set size to zero.  For null pointers, set
	 size to the maximum for now since those may be the result of
	 jump threading.  */
      if (integer_zerop (ptr))
	pref->set_max_size_range ();
      else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  /* Let the target decide whether address zero is valid in the
	     pointed-to address space (it can be on embedded targets).  */
	  tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
	  addr_space_t as = TYPE_ADDR_SPACE (deref_type);
	  if (targetm.addr_space.zero_address_valid (as))
	    pref->set_max_size_range ();
	  else
	    pref->sizrng[0] = pref->sizrng[1] = 0;
	}
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;

      pref->ref = ptr;
      return true;

    case STRING_CST:
      /* The size of a string literal including its terminating nul.  */
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      pref->ref = ptr;
      return true;

    case POINTER_PLUS_EXPR:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* Clear DEREF since the offset is being applied to the target
	   of the dereference.  */
	pref->deref = 0;

	offset_int orng[2];
	tree off = pref->eval (TREE_OPERAND (ptr, 1));
	if (get_offset_range (off, stmt, orng, qry->rvals))
	  pref->add_offset (orng[0], orng[1]);
	else
	  /* The offset is unknown; assume it can be anything.  */
	  pref->add_max_offset ();
	return true;
      }

    case VIEW_CONVERT_EXPR:
      /* A view-convert doesn't change the underlying object; look
	 through it.  */
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize_r (ptr, stmt, addr, ostype, pref, snlim, qry);

    case SSA_NAME:
      return handle_ssa_name (ptr, addr, ostype, pref, snlim, qry);

    default:
      break;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->ref = ptr;
  pref->base0 = false;
  pref->set_max_size_range ();
  if (TREE_CODE (ptr) == SSA_NAME)
    qry->put_ref (ptr, *pref);
  return true;
}
2352 /* A "public" wrapper around the above. Clients should use this overload
2353 instead. */
2355 tree
2356 compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2357 pointer_query *ptr_qry)
2359 pointer_query qry;
2360 if (ptr_qry)
2361 ptr_qry->depth = 0;
2362 else
2363 ptr_qry = &qry;
2365 /* Clear and invalidate in case *PREF is being reused. */
2366 pref->offrng[0] = pref->offrng[1] = 0;
2367 pref->sizrng[0] = pref->sizrng[1] = -1;
2369 ssa_name_limit_t snlim;
2370 if (!compute_objsize_r (ptr, stmt, false, ostype, pref, snlim, ptr_qry))
2371 return NULL_TREE;
2373 offset_int maxsize = pref->size_remaining ();
2374 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2375 pref->offrng[0] = 0;
2376 return wide_int_to_tree (sizetype, maxsize);
2379 /* Transitional wrapper. The function should be removed once callers
2380 transition to the pointer_query API. */
2382 tree
2383 compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2384 range_query *rvals /* = NULL */)
2386 pointer_query qry;
2387 qry.rvals = rvals;
2388 return compute_objsize (ptr, stmt, ostype, pref, &qry);
2391 /* Legacy wrapper around the above. The function should be removed
2392 once callers transition to one of the two above. */
2394 tree
2395 compute_objsize (tree ptr, gimple *stmt, int ostype, tree *pdecl /* = NULL */,
2396 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2398 /* Set the initial offsets to zero and size to negative to indicate
2399 none has been computed yet. */
2400 access_ref ref;
2401 tree size = compute_objsize (ptr, stmt, ostype, &ref, rvals);
2402 if (!size || !ref.base0)
2403 return NULL_TREE;
2405 if (pdecl)
2406 *pdecl = ref.ref;
2408 if (poff)
2409 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
2411 return size;
2414 /* Determine the offset *FLDOFF of the first byte of a struct member
2415 of TYPE (possibly recursively) into which the byte offset OFF points,
2416 starting after the field START_AFTER if it's non-null. On success,
2417 if nonnull, set *FLDOFF to the offset of the first byte, and return
2418 the field decl. If nonnull, set *NEXTOFF to the offset of the next
2419 field (which reflects any padding between the returned field and
2420 the next). Otherwise, if no such member can be found, return null. */
tree
field_at_offset (tree type, tree start_after, HOST_WIDE_INT off,
		 HOST_WIDE_INT *fldoff /* = nullptr */,
		 HOST_WIDE_INT *nextoff /* = nullptr */)
{
  tree first_fld = TYPE_FIELDS (type);

  /* Point the optional out-parameters at local buffers so the body
     can update them unconditionally.  */
  HOST_WIDE_INT offbuf = 0, nextbuf = 0;
  if (!fldoff)
    fldoff = &offbuf;
  if (!nextoff)
    nextoff = &nextbuf;

  *nextoff = 0;

  /* The field to return.  */
  tree last_fld = NULL_TREE;
  /* The next field to advance to.  */
  tree next_fld = NULL_TREE;

  /* NEXT_FLD's cached offset.  */
  HOST_WIDE_INT next_pos = -1;

  for (tree fld = first_fld; fld; fld = next_fld)
    {
      next_fld = fld;
      do
	/* Advance to the next relevant data member.  */
	next_fld = TREE_CHAIN (next_fld);
      while (next_fld
	     && (TREE_CODE (next_fld) != FIELD_DECL
		 || DECL_ARTIFICIAL (next_fld)));

      /* Skip artificial members and anything that isn't a data member.  */
      if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld))
	continue;

      if (fld == start_after)
	continue;

      tree fldtype = TREE_TYPE (fld);
      /* The offset of FLD within its immediately enclosing structure.
	 Reuse the offset cached while examining the previous field
	 when available.  */
      HOST_WIDE_INT fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos;

      /* If the size is not available the field is a flexible array
	 member.  Treat this case as success (the member is considered
	 to extend at least to OFF).  */
      tree typesize = TYPE_SIZE_UNIT (fldtype);
      HOST_WIDE_INT fldsize = (tree_fits_uhwi_p (typesize)
			       ? tree_to_uhwi (typesize)
			       : off);

      /* If OFF is beyond the end of the current field continue.  */
      HOST_WIDE_INT fldend = fldpos + fldsize;
      if (fldend < off)
	continue;

      if (next_fld)
	{
	  /* If OFF is equal to the offset of the next field continue
	     to it and skip the array/struct business below.  */
	  next_pos = int_byte_position (next_fld);
	  *nextoff = *fldoff + next_pos;
	  if (*nextoff == off && TREE_CODE (type) != UNION_TYPE)
	    continue;
	}
      else
	/* No next field: mark *NEXTOFF as unset for the final fixup
	   after the loop.  */
	*nextoff = HOST_WIDE_INT_MAX;

      /* OFF refers somewhere into the current field or just past its end,
	 which could mean it refers to the next field.  */
      if (TREE_CODE (fldtype) == ARRAY_TYPE)
	{
	  /* Will be set to the offset of the first byte of the array
	     element (which may be an array) of FLDTYPE into which
	     OFF - FLDPOS points (which may be past ELTOFF).  */
	  HOST_WIDE_INT eltoff = 0;
	  if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff))
	    fldtype = ft;
	  else
	    continue;

	  /* Advance the position to include the array element above.
	     If OFF - FLDPOS refers to a member of FLDTYPE, the member
	     will be determined below.  */
	  fldpos += eltoff;
	}

      *fldoff += fldpos;

      if (TREE_CODE (fldtype) == RECORD_TYPE)
	/* Drill down into the current field if it's a struct.  */
	fld = field_at_offset (fldtype, start_after, off - fldpos,
			       fldoff, nextoff);

      last_fld = fld;

      /* Unless the offset is just past the end of the field return it.
	 Otherwise save it and return it only if the offset of the
	 next field is greater (i.e., there is padding between the two)
	 or if there is no next field.  */
      if (off < fldend)
	break;
    }

  if (*nextoff == HOST_WIDE_INT_MAX && next_fld)
    *nextoff = next_pos;

  return last_fld;
}
2531 /* Determine the offset *ELTOFF of the first byte of the array element
2532 of array ARTYPE into which the byte offset OFF points. On success
2533 set *ELTOFF to the offset of the first byte and return type.
2534 Otherwise, if no such element can be found, return null. */
2536 tree
2537 array_elt_at_offset (tree artype, HOST_WIDE_INT off,
2538 HOST_WIDE_INT *eltoff /* = nullptr */,
2539 HOST_WIDE_INT *subar_size /* = nullptr */)
2541 gcc_assert (TREE_CODE (artype) == ARRAY_TYPE);
2543 HOST_WIDE_INT dummy;
2544 if (!eltoff)
2545 eltoff = &dummy;
2546 if (!subar_size)
2547 subar_size = &dummy;
2549 tree eltype = artype;
2550 while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE)
2551 eltype = TREE_TYPE (eltype);
2553 tree subartype = eltype;
2554 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype))
2555 || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node))
2556 eltype = TREE_TYPE (eltype);
2558 *subar_size = int_size_in_bytes (subartype);
2560 if (eltype == artype)
2562 *eltoff = 0;
2563 return artype;
2566 HOST_WIDE_INT artype_size = int_size_in_bytes (artype);
2567 HOST_WIDE_INT eltype_size = int_size_in_bytes (eltype);
2569 if (off < artype_size)// * eltype_size)
2571 *eltoff = (off / eltype_size) * eltype_size;
2572 return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype;
2575 return NULL_TREE;
2578 /* Wrapper around build_array_type_nelts that makes sure the array
2579 can be created at all and handles zero sized arrays specially. */
tree
build_printable_array_type (tree eltype, unsigned HOST_WIDE_INT nelts)
{
  /* If the element type has a constant nonzero size that is not
     a whole multiple of its alignment (the wi::zext test extracts
     the low log2(align) bits of the size), fall back on the type's
     main variant.  NOTE(review): presumably this avoids trying to
     build an array of a variant type whose size/alignment combination
     build_array_type_nelts cannot represent -- confirm.  */
  if (TYPE_SIZE_UNIT (eltype)
      && TREE_CODE (TYPE_SIZE_UNIT (eltype)) == INTEGER_CST
      && !integer_zerop (TYPE_SIZE_UNIT (eltype))
      && TYPE_ALIGN_UNIT (eltype) > 1
      && wi::zext (wi::to_wide (TYPE_SIZE_UNIT (eltype)),
		   ffs_hwi (TYPE_ALIGN_UNIT (eltype)) - 1) != 0)
    eltype = TYPE_MAIN_VARIANT (eltype);

  /* Consider excessive NELTS an array of unknown bound.  */
  tree idxtype = NULL_TREE;
  if (nelts < HOST_WIDE_INT_MAX)
    {
      /* The common case: a nonzero, representable element count.  */
      if (nelts)
	return build_array_type_nelts (eltype, nelts);
      /* NELTS == 0: build a [0] index range for the zero-size array
	 constructed below.  */
      idxtype = build_range_type (sizetype, size_zero_node, NULL_TREE);
    }

  /* Zero-length (or unknown-bound) array: build a distinct copy and
     force its size to zero.  */
  tree arrtype = build_array_type (eltype, idxtype);
  arrtype = build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype));
  TYPE_SIZE (arrtype) = bitsize_zero_node;
  TYPE_SIZE_UNIT (arrtype) = size_zero_node;
  return arrtype;
}