Fix gnat.dg/opt39.adb on hppa.
[official-gcc.git] / gcc / pointer-query.cc
blob5b05e9bedf88ffc74164d699611765bf8e17689e
1 /* Definitions of the pointer_query and related classes.
3 Copyright (C) 2020-2023 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "stringpool.h"
28 #include "tree-vrp.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "tree-ssa-strlen.h"
33 #include "langhooks.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "gimple-iterator.h"
37 #include "gimple-fold.h"
38 #include "gimple-ssa.h"
39 #include "intl.h"
40 #include "attr-fnspec.h"
41 #include "gimple-range.h"
42 #include "pointer-query.h"
43 #include "tree-pretty-print.h"
44 #include "tree-ssanames.h"
45 #include "target.h"
47 static bool compute_objsize_r (tree, gimple *, bool, int, access_ref *,
48 ssa_name_limit_t &, pointer_query *);
50 /* Wrapper around the wide_int overload of get_range that accepts
51 offset_int instead. For middle end expressions returns the same
52 result. For a subset of nonconstamt expressions emitted by the front
53 end determines a more precise range than would be possible otherwise. */
55 static bool
56 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
58 offset_int add = 0;
59 if (TREE_CODE (x) == PLUS_EXPR)
61 /* Handle constant offsets in pointer addition expressions seen
62 n the front end IL. */
63 tree op = TREE_OPERAND (x, 1);
64 if (TREE_CODE (op) == INTEGER_CST)
66 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
67 add = wi::to_offset (op);
68 x = TREE_OPERAND (x, 0);
72 if (TREE_CODE (x) == NOP_EXPR)
73 /* Also handle conversions to sizetype seen in the front end IL. */
74 x = TREE_OPERAND (x, 0);
76 tree type = TREE_TYPE (x);
77 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
78 return false;
80 if (TREE_CODE (x) != INTEGER_CST
81 && TREE_CODE (x) != SSA_NAME)
83 if (TYPE_UNSIGNED (type)
84 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
85 type = signed_type_for (type);
87 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
88 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
89 return x;
92 wide_int wr[2];
93 if (!get_range (x, stmt, wr, rvals))
94 return false;
96 signop sgn = SIGNED;
97 /* Only convert signed integers or unsigned sizetype to a signed
98 offset and avoid converting large positive values in narrower
99 types to negative offsets. */
100 if (TYPE_UNSIGNED (type)
101 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
102 sgn = UNSIGNED;
104 r[0] = offset_int::from (wr[0], sgn);
105 r[1] = offset_int::from (wr[1], sgn);
106 return true;
109 /* Return the argument that the call STMT to a built-in function returns
110 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
111 from the argument reflected in the value returned by the built-in if it
112 can be determined, otherwise to 0 and HWI_M1U respectively. Set
113 *PAST_END for functions like mempcpy that might return a past the end
114 pointer (most functions return a dereferenceable pointer to an existing
115 element of an array). */
117 static tree
118 gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
119 ssa_name_limit_t &snlim, pointer_query *qry)
121 /* Clear and set below for the rare function(s) that might return
122 a past-the-end pointer. */
123 *past_end = false;
126 /* Check for attribute fn spec to see if the function returns one
127 of its arguments. */
128 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
129 unsigned int argno;
130 if (fnspec.returns_arg (&argno))
132 /* Functions return the first argument (not a range). */
133 offrng[0] = offrng[1] = 0;
134 return gimple_call_arg (stmt, argno);
138 if (gimple_call_num_args (stmt) < 1)
139 return NULL_TREE;
141 tree fn = gimple_call_fndecl (stmt);
142 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
144 /* See if this is a call to placement new. */
145 if (!fn
146 || !DECL_IS_OPERATOR_NEW_P (fn)
147 || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
148 return NULL_TREE;
150 /* Check the mangling, keeping in mind that operator new takes
151 a size_t which could be unsigned int or unsigned long. */
152 tree fname = DECL_ASSEMBLER_NAME (fn);
153 if (!id_equal (fname, "_ZnwjPv") // ordinary form
154 && !id_equal (fname, "_ZnwmPv") // ordinary form
155 && !id_equal (fname, "_ZnajPv") // array form
156 && !id_equal (fname, "_ZnamPv")) // array form
157 return NULL_TREE;
159 if (gimple_call_num_args (stmt) != 2)
160 return NULL_TREE;
162 /* Allocation functions return a pointer to the beginning. */
163 offrng[0] = offrng[1] = 0;
164 return gimple_call_arg (stmt, 1);
167 switch (DECL_FUNCTION_CODE (fn))
169 case BUILT_IN_MEMCPY:
170 case BUILT_IN_MEMCPY_CHK:
171 case BUILT_IN_MEMMOVE:
172 case BUILT_IN_MEMMOVE_CHK:
173 case BUILT_IN_MEMSET:
174 case BUILT_IN_STRCAT:
175 case BUILT_IN_STRCAT_CHK:
176 case BUILT_IN_STRCPY:
177 case BUILT_IN_STRCPY_CHK:
178 case BUILT_IN_STRNCAT:
179 case BUILT_IN_STRNCAT_CHK:
180 case BUILT_IN_STRNCPY:
181 case BUILT_IN_STRNCPY_CHK:
182 /* Functions return the first argument (not a range). */
183 offrng[0] = offrng[1] = 0;
184 return gimple_call_arg (stmt, 0);
186 case BUILT_IN_MEMPCPY:
187 case BUILT_IN_MEMPCPY_CHK:
189 /* The returned pointer is in a range constrained by the smaller
190 of the upper bound of the size argument and the source object
191 size. */
192 offrng[0] = 0;
193 offrng[1] = HOST_WIDE_INT_M1U;
194 tree off = gimple_call_arg (stmt, 2);
195 bool off_valid = get_offset_range (off, stmt, offrng, qry->rvals);
196 if (!off_valid || offrng[0] != offrng[1])
198 /* If the offset is either indeterminate or in some range,
199 try to constrain its upper bound to at most the size
200 of the source object. */
201 access_ref aref;
202 tree src = gimple_call_arg (stmt, 1);
203 if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
204 && aref.sizrng[1] < offrng[1])
205 offrng[1] = aref.sizrng[1];
208 /* Mempcpy may return a past-the-end pointer. */
209 *past_end = true;
210 return gimple_call_arg (stmt, 0);
213 case BUILT_IN_MEMCHR:
215 tree off = gimple_call_arg (stmt, 2);
216 if (get_offset_range (off, stmt, offrng, qry->rvals))
217 offrng[1] -= 1;
218 else
219 offrng[1] = HOST_WIDE_INT_M1U;
221 offrng[0] = 0;
222 return gimple_call_arg (stmt, 0);
225 case BUILT_IN_STRCHR:
226 case BUILT_IN_STRRCHR:
227 case BUILT_IN_STRSTR:
228 offrng[0] = 0;
229 offrng[1] = HOST_WIDE_INT_M1U;
230 return gimple_call_arg (stmt, 0);
232 case BUILT_IN_STPCPY:
233 case BUILT_IN_STPCPY_CHK:
235 access_ref aref;
236 tree src = gimple_call_arg (stmt, 1);
237 if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry))
238 offrng[1] = aref.sizrng[1] - 1;
239 else
240 offrng[1] = HOST_WIDE_INT_M1U;
242 offrng[0] = 0;
243 return gimple_call_arg (stmt, 0);
246 case BUILT_IN_STPNCPY:
247 case BUILT_IN_STPNCPY_CHK:
249 /* The returned pointer is in a range between the first argument
250 and it plus the smaller of the upper bound of the size argument
251 and the source object size. */
252 offrng[1] = HOST_WIDE_INT_M1U;
253 tree off = gimple_call_arg (stmt, 2);
254 if (!get_offset_range (off, stmt, offrng, qry->rvals)
255 || offrng[0] != offrng[1])
257 /* If the offset is either indeterminate or in some range,
258 try to constrain its upper bound to at most the size
259 of the source object. */
260 access_ref aref;
261 tree src = gimple_call_arg (stmt, 1);
262 if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
263 && aref.sizrng[1] < offrng[1])
264 offrng[1] = aref.sizrng[1];
267 /* When the source is the empty string the returned pointer is
268 a copy of the argument. Otherwise stpcpy can also return
269 a past-the-end pointer. */
270 offrng[0] = 0;
271 *past_end = true;
272 return gimple_call_arg (stmt, 0);
275 default:
276 break;
279 return NULL_TREE;
282 /* Return true when EXP's range can be determined and set RANGE[] to it
283 after adjusting it if necessary to make EXP a represents a valid size
284 of object, or a valid size argument to an allocation function declared
285 with attribute alloc_size (whose argument may be signed), or to a string
286 manipulation function like memset.
287 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
288 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
289 a (nearly) invalid argument to allocation functions like malloc but it
290 is a valid argument to functions like memset.
291 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
292 in a multi-range, otherwise to the smallest valid subrange. */
294 bool
295 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
296 int flags /* = 0 */)
298 if (!exp)
299 return false;
301 if (tree_fits_uhwi_p (exp))
303 /* EXP is a constant. */
304 range[0] = range[1] = exp;
305 return true;
308 tree exptype = TREE_TYPE (exp);
309 bool integral = INTEGRAL_TYPE_P (exptype);
311 wide_int min, max;
312 enum value_range_kind range_type;
314 if (!query)
315 query = get_range_query (cfun);
317 if (integral)
319 value_range vr;
321 query->range_of_expr (vr, exp, stmt);
323 if (vr.undefined_p ())
324 vr.set_varying (TREE_TYPE (exp));
325 range_type = vr.kind ();
326 min = wi::to_wide (vr.min ());
327 max = wi::to_wide (vr.max ());
329 else
330 range_type = VR_VARYING;
332 if (range_type == VR_VARYING)
334 if (integral)
336 /* Use the full range of the type of the expression when
337 no value range information is available. */
338 range[0] = TYPE_MIN_VALUE (exptype);
339 range[1] = TYPE_MAX_VALUE (exptype);
340 return true;
343 range[0] = NULL_TREE;
344 range[1] = NULL_TREE;
345 return false;
348 unsigned expprec = TYPE_PRECISION (exptype);
350 bool signed_p = !TYPE_UNSIGNED (exptype);
352 if (range_type == VR_ANTI_RANGE)
354 if (signed_p)
356 if (wi::les_p (max, 0))
358 /* EXP is not in a strictly negative range. That means
359 it must be in some (not necessarily strictly) positive
360 range which includes zero. Since in signed to unsigned
361 conversions negative values end up converted to large
362 positive values, and otherwise they are not valid sizes,
363 the resulting range is in both cases [0, TYPE_MAX]. */
364 min = wi::zero (expprec);
365 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
367 else if (wi::les_p (min - 1, 0))
369 /* EXP is not in a negative-positive range. That means EXP
370 is either negative, or greater than max. Since negative
371 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
372 min = max + 1;
373 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
375 else
377 max = min - 1;
378 min = wi::zero (expprec);
381 else
383 wide_int maxsize = wi::to_wide (max_object_size ());
384 min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
385 max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
386 if (wi::eq_p (0, min - 1))
388 /* EXP is unsigned and not in the range [1, MAX]. That means
389 it's either zero or greater than MAX. Even though 0 would
390 normally be detected by -Walloc-zero, unless ALLOW_ZERO
391 is set, set the range to [MAX, TYPE_MAX] so that when MAX
392 is greater than the limit the whole range is diagnosed. */
393 wide_int maxsize = wi::to_wide (max_object_size ());
394 if (flags & SR_ALLOW_ZERO)
396 if (wi::leu_p (maxsize, max + 1)
397 || !(flags & SR_USE_LARGEST))
398 min = max = wi::zero (expprec);
399 else
401 min = max + 1;
402 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
405 else
407 min = max + 1;
408 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
411 else if ((flags & SR_USE_LARGEST)
412 && wi::ltu_p (max + 1, maxsize))
414 /* When USE_LARGEST is set and the larger of the two subranges
415 is a valid size, use it... */
416 min = max + 1;
417 max = maxsize;
419 else
421 /* ...otherwise use the smaller subrange. */
422 max = min - 1;
423 min = wi::zero (expprec);
428 range[0] = wide_int_to_tree (exptype, min);
429 range[1] = wide_int_to_tree (exptype, max);
431 return true;
434 bool
435 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
437 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
440 /* If STMT is a call to an allocation function, returns the constant
441 maximum size of the object allocated by the call represented as
442 sizetype. If nonnull, sets RNG1[] to the range of the size.
443 When nonnull, uses RVALS for range information, otherwise gets global
444 range info.
445 Returns null when STMT is not a call to a valid allocation function. */
447 tree
448 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
449 range_query *qry /* = NULL */)
451 if (!stmt || !is_gimple_call (stmt))
452 return NULL_TREE;
454 tree allocfntype;
455 if (tree fndecl = gimple_call_fndecl (stmt))
456 allocfntype = TREE_TYPE (fndecl);
457 else
458 allocfntype = gimple_call_fntype (stmt);
460 if (!allocfntype)
461 return NULL_TREE;
463 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
464 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
465 if (!at)
467 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
468 return NULL_TREE;
470 argidx1 = 0;
473 unsigned nargs = gimple_call_num_args (stmt);
475 if (argidx1 == UINT_MAX)
477 tree atval = TREE_VALUE (at);
478 if (!atval)
479 return NULL_TREE;
481 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
482 if (nargs <= argidx1)
483 return NULL_TREE;
485 atval = TREE_CHAIN (atval);
486 if (atval)
488 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
489 if (nargs <= argidx2)
490 return NULL_TREE;
494 tree size = gimple_call_arg (stmt, argidx1);
496 wide_int rng1_buf[2];
497 /* If RNG1 is not set, use the buffer. */
498 if (!rng1)
499 rng1 = rng1_buf;
501 /* Use maximum precision to avoid overflow below. */
502 const int prec = ADDR_MAX_PRECISION;
505 tree r[2];
506 /* Determine the largest valid range size, including zero. */
507 if (!get_size_range (qry, size, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
508 return NULL_TREE;
509 rng1[0] = wi::to_wide (r[0], prec);
510 rng1[1] = wi::to_wide (r[1], prec);
513 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
514 return fold_convert (sizetype, size);
516 /* To handle ranges do the math in wide_int and return the product
517 of the upper bounds as a constant. Ignore anti-ranges. */
518 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
519 wide_int rng2[2];
521 tree r[2];
522 /* As above, use the full non-negative range on failure. */
523 if (!get_size_range (qry, n, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
524 return NULL_TREE;
525 rng2[0] = wi::to_wide (r[0], prec);
526 rng2[1] = wi::to_wide (r[1], prec);
529 /* Compute products of both bounds for the caller but return the lesser
530 of SIZE_MAX and the product of the upper bounds as a constant. */
531 rng1[0] = rng1[0] * rng2[0];
532 rng1[1] = rng1[1] * rng2[1];
534 const tree size_max = TYPE_MAX_VALUE (sizetype);
535 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
537 rng1[1] = wi::to_wide (size_max, prec);
538 return size_max;
541 return wide_int_to_tree (sizetype, rng1[1]);
544 /* For an access to an object referenced to by the function parameter PTR
545 of pointer type, and set RNG[] to the range of sizes of the object
546 obtainedfrom the attribute access specification for the current function.
547 Set STATIC_ARRAY if the array parameter has been declared [static].
548 Return the function parameter on success and null otherwise. */
550 static tree
551 gimple_parm_array_size (tree ptr, wide_int rng[2],
552 bool *static_array /* = NULL */)
554 /* For a function argument try to determine the byte size of the array
555 from the current function declaratation (e.g., attribute access or
556 related). */
557 tree var = SSA_NAME_VAR (ptr);
558 if (TREE_CODE (var) != PARM_DECL || !POINTER_TYPE_P (TREE_TYPE (var)))
559 return NULL_TREE;
561 const unsigned prec = TYPE_PRECISION (sizetype);
563 rdwr_map rdwr_idx;
564 attr_access *access = get_parm_access (rdwr_idx, var);
565 if (!access)
566 return NULL_TREE;
568 if (access->sizarg != UINT_MAX)
570 /* TODO: Try to extract the range from the argument based on
571 those of subsequent assertions or based on known calls to
572 the current function. */
573 return NULL_TREE;
576 if (!access->minsize)
577 return NULL_TREE;
579 /* Only consider ordinary array bound at level 2 (or above if it's
580 ever added). */
581 if (warn_array_parameter < 2 && !access->static_p)
582 return NULL_TREE;
584 if (static_array)
585 *static_array = access->static_p;
587 rng[0] = wi::zero (prec);
588 rng[1] = wi::uhwi (access->minsize, prec);
589 /* Multiply the array bound encoded in the attribute by the size
590 of what the pointer argument to which it decays points to. */
591 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
592 tree size = TYPE_SIZE_UNIT (eltype);
593 if (!size || TREE_CODE (size) != INTEGER_CST)
594 return NULL_TREE;
596 rng[1] *= wi::to_wide (size, prec);
597 return var;
600 /* Initialize the object. */
602 access_ref::access_ref ()
603 : ref (), eval ([](tree x){ return x; }), deref (), ref_nullptr_p (false),
604 trail1special (true), base0 (true), parmarray ()
606 /* Set to valid. */
607 offrng[0] = offrng[1] = 0;
608 offmax[0] = offmax[1] = 0;
609 /* Invalidate. */
610 sizrng[0] = sizrng[1] = -1;
613 /* Return the PHI node REF refers to or null if it doesn't. */
615 gphi *
616 access_ref::phi () const
618 if (!ref || TREE_CODE (ref) != SSA_NAME)
619 return NULL;
621 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
622 if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
623 return NULL;
625 return as_a <gphi *> (def_stmt);
628 /* Determine the size and offset for ARG, append it to ALL_REFS, and
629 merge the result with *THIS. Ignore ARG if SKIP_NULL is set and
630 ARG refers to the null pointer. Return true on success and false
631 on failure. */
633 void
634 access_ref::merge_ref (vec<access_ref> *all_refs, tree arg, gimple *stmt,
635 int ostype, bool skip_null,
636 ssa_name_limit_t &snlim, pointer_query &qry)
638 access_ref aref;
639 if (!compute_objsize_r (arg, stmt, false, ostype, &aref, snlim, &qry)
640 || aref.sizrng[0] < 0)
642 /* This may be a PHI with all null pointer arguments. Handle it
643 conservatively by setting all properties to the most permissive
644 values. */
645 base0 = false;
646 offrng[0] = offrng[1] = 0;
647 add_max_offset ();
648 set_max_size_range ();
649 return;
652 if (all_refs)
654 access_ref dummy_ref;
655 aref.get_ref (all_refs, &dummy_ref, ostype, &snlim, &qry);
658 if (TREE_CODE (arg) == SSA_NAME)
659 qry.put_ref (arg, aref, ostype);
661 if (all_refs)
662 all_refs->safe_push (aref);
664 aref.deref += deref;
666 bool merged_parmarray = aref.parmarray;
668 const bool nullp = skip_null && integer_zerop (arg);
669 const offset_int maxobjsize = wi::to_offset (max_object_size ());
670 offset_int minsize = sizrng[0];
672 if (sizrng[0] < 0)
674 /* If *THIS doesn't contain a meaningful result yet set it to AREF
675 unless the argument is null and it's okay to ignore it. */
676 if (!nullp)
677 *this = aref;
679 /* Set if the current argument refers to one or more objects of
680 known size (or range of sizes), as opposed to referring to
681 one or more unknown object(s). */
682 const bool arg_known_size = (aref.sizrng[0] != 0
683 || aref.sizrng[1] != maxobjsize);
684 if (arg_known_size)
685 sizrng[0] = aref.sizrng[0];
687 return;
690 /* Disregard null pointers in PHIs with two or more arguments.
691 TODO: Handle this better! */
692 if (nullp)
693 return;
695 const bool known_size = (sizrng[0] != 0 || sizrng[1] != maxobjsize);
697 if (known_size && aref.sizrng[0] < minsize)
698 minsize = aref.sizrng[0];
700 /* Extend the size and offset of *THIS to account for AREF. The result
701 can be cached but results in false negatives. */
703 offset_int orng[2];
704 if (sizrng[1] < aref.sizrng[1])
706 orng[0] = offrng[0];
707 orng[1] = offrng[1];
708 *this = aref;
710 else
712 orng[0] = aref.offrng[0];
713 orng[1] = aref.offrng[1];
716 if (orng[0] < offrng[0])
717 offrng[0] = orng[0];
718 if (offrng[1] < orng[1])
719 offrng[1] = orng[1];
721 /* Reset the PHI's BASE0 flag if any of the nonnull arguments
722 refers to an object at an unknown offset. */
723 if (!aref.base0)
724 base0 = false;
726 sizrng[0] = minsize;
727 parmarray = merged_parmarray;
729 return;
732 /* Determine and return the largest object to which *THIS refers. If
733 *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
734 of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
735 argument ARG. */
737 tree
738 access_ref::get_ref (vec<access_ref> *all_refs,
739 access_ref *pref /* = NULL */,
740 int ostype /* = 1 */,
741 ssa_name_limit_t *psnlim /* = NULL */,
742 pointer_query *qry /* = NULL */) const
744 if (!ref || TREE_CODE (ref) != SSA_NAME)
745 return NULL;
747 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
748 cause unbounded recursion. */
749 ssa_name_limit_t snlim_buf;
750 if (!psnlim)
751 psnlim = &snlim_buf;
753 pointer_query empty_qry;
754 if (!qry)
755 qry = &empty_qry;
757 if (gimple *def_stmt = SSA_NAME_DEF_STMT (ref))
759 if (is_gimple_assign (def_stmt))
761 tree_code code = gimple_assign_rhs_code (def_stmt);
762 if (code != MIN_EXPR && code != MAX_EXPR)
763 return NULL_TREE;
765 access_ref aref;
766 tree arg1 = gimple_assign_rhs1 (def_stmt);
767 aref.merge_ref (all_refs, arg1, def_stmt, ostype, false,
768 *psnlim, *qry);
770 tree arg2 = gimple_assign_rhs2 (def_stmt);
771 aref.merge_ref (all_refs, arg2, def_stmt, ostype, false,
772 *psnlim, *qry);
774 if (pref && pref != this)
776 tree ref = pref->ref;
777 *pref = aref;
778 pref->ref = ref;
781 return aref.ref;
784 else
785 return NULL_TREE;
787 gphi *phi_stmt = this->phi ();
788 if (!phi_stmt)
789 return ref;
791 if (!psnlim->visit_phi (ref))
792 return NULL_TREE;
794 /* The conservative result of the PHI reflecting the offset and size
795 of the largest PHI argument, regardless of whether or not they all
796 refer to the same object. */
797 access_ref phi_ref;
798 if (pref)
800 /* The identity of the object has not been determined yet but
801 PREF->REF is set by the caller to the PHI for convenience.
802 The size is negative/invalid and the offset is zero (it's
803 updated only after the identity of the object has been
804 established). */
805 gcc_assert (pref->sizrng[0] < 0);
806 gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);
808 phi_ref = *pref;
811 const offset_int maxobjsize = wi::to_offset (max_object_size ());
812 const unsigned nargs = gimple_phi_num_args (phi_stmt);
813 for (unsigned i = 0; i < nargs; ++i)
815 access_ref phi_arg_ref;
816 bool skip_null = i || i + 1 < nargs;
817 tree arg = gimple_phi_arg_def (phi_stmt, i);
818 phi_ref.merge_ref (all_refs, arg, phi_stmt, ostype, skip_null,
819 *psnlim, *qry);
821 if (!phi_ref.base0
822 && phi_ref.sizrng[0] == 0
823 && phi_ref.sizrng[1] >= maxobjsize)
824 /* When an argument results in the most permissive result,
825 the remaining arguments cannot constrain it. Short-circuit
826 the evaluation. */
827 break;
830 if (phi_ref.sizrng[0] < 0)
832 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
833 (perhaps because they have all been already visited by prior
834 recursive calls). */
835 psnlim->leave_phi (ref);
836 return NULL_TREE;
839 /* Avoid changing *THIS. */
840 if (pref && pref != this)
842 /* Keep the SSA_NAME of the PHI unchanged so that all PHI arguments
843 can be referred to later if necessary. This is useful even if
844 they all refer to the same object. */
845 tree ref = pref->ref;
846 *pref = phi_ref;
847 pref->ref = ref;
850 psnlim->leave_phi (ref);
852 return phi_ref.ref;
855 /* Return the maximum amount of space remaining and if non-null, set
856 argument to the minimum. */
858 offset_int
859 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
861 offset_int minbuf;
862 if (!pmin)
863 pmin = &minbuf;
865 if (sizrng[0] < 0)
867 /* If the identity of the object hasn't been determined return
868 the maximum size range. */
869 *pmin = 0;
870 return wi::to_offset (max_object_size ());
873 /* add_offset() ensures the offset range isn't inverted. */
874 gcc_checking_assert (offrng[0] <= offrng[1]);
876 if (base0)
878 /* The offset into referenced object is zero-based (i.e., it's
879 not referenced by a pointer into middle of some unknown object). */
880 if (offrng[0] < 0 && offrng[1] < 0)
882 /* If the offset is negative the remaining size is zero. */
883 *pmin = 0;
884 return 0;
887 if (sizrng[1] <= offrng[0])
889 /* If the starting offset is greater than or equal to the upper
890 bound on the size of the object, the space remaining is zero.
891 As a special case, if it's equal, set *PMIN to -1 to let
892 the caller know the offset is valid and just past the end. */
893 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
894 return 0;
897 /* Otherwise return the size minus the lower bound of the offset. */
898 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
900 *pmin = sizrng[0] - or0;
901 return sizrng[1] - or0;
904 /* The offset to the referenced object isn't zero-based (i.e., it may
905 refer to a byte other than the first. The size of such an object
906 is constrained only by the size of the address space (the result
907 of max_object_size()). */
908 if (sizrng[1] <= offrng[0])
910 *pmin = 0;
911 return 0;
914 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
916 *pmin = sizrng[0] - or0;
917 return sizrng[1] - or0;
920 /* Return true if the offset and object size are in range for SIZE. */
922 bool
923 access_ref::offset_in_range (const offset_int &size) const
925 if (size_remaining () < size)
926 return false;
928 if (base0)
929 return offmax[0] >= 0 && offmax[1] <= sizrng[1];
931 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
932 return offmax[0] > -maxoff && offmax[1] < maxoff;
935 /* Add the range [MIN, MAX] to the offset range. For known objects (with
936 zero-based offsets) at least one of whose offset's bounds is in range,
937 constrain the other (or both) to the bounds of the object (i.e., zero
938 and the upper bound of its size). This improves the quality of
939 diagnostics. */
941 void access_ref::add_offset (const offset_int &min, const offset_int &max)
943 if (min <= max)
945 /* To add an ordinary range just add it to the bounds. */
946 offrng[0] += min;
947 offrng[1] += max;
949 else if (!base0)
951 /* To add an inverted range to an offset to an unknown object
952 expand it to the maximum. */
953 add_max_offset ();
954 return;
956 else
958 /* To add an inverted range to an offset to an known object set
959 the upper bound to the maximum representable offset value
960 (which may be greater than MAX_OBJECT_SIZE).
961 The lower bound is either the sum of the current offset and
962 MIN when abs(MAX) is greater than the former, or zero otherwise.
963 Zero because then the inverted range includes the negative of
964 the lower bound. */
965 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
966 offrng[1] = maxoff;
968 if (max >= 0)
970 offrng[0] = 0;
971 if (offmax[0] > 0)
972 offmax[0] = 0;
973 return;
976 offset_int absmax = wi::abs (max);
977 if (offrng[0] < absmax)
979 offrng[0] += min;
980 /* Cap the lower bound at the upper (set to MAXOFF above)
981 to avoid inadvertently recreating an inverted range. */
982 if (offrng[1] < offrng[0])
983 offrng[0] = offrng[1];
985 else
986 offrng[0] = 0;
989 /* Set the minimum and maximmum computed so far. */
990 if (offrng[1] < 0 && offrng[1] < offmax[0])
991 offmax[0] = offrng[1];
992 if (offrng[0] > 0 && offrng[0] > offmax[1])
993 offmax[1] = offrng[0];
995 if (!base0)
996 return;
998 /* When referencing a known object check to see if the offset computed
999 so far is in bounds... */
1000 offset_int remrng[2];
1001 remrng[1] = size_remaining (remrng);
1002 if (remrng[1] > 0 || remrng[0] < 0)
1004 /* ...if so, constrain it so that neither bound exceeds the size of
1005 the object. Out of bounds offsets are left unchanged, and, for
1006 better or worse, become in bounds later. They should be detected
1007 and diagnosed at the point they first become invalid by
1008 -Warray-bounds. */
1009 if (offrng[0] < 0)
1010 offrng[0] = 0;
1011 if (offrng[1] > sizrng[1])
1012 offrng[1] = sizrng[1];
1016 /* Issue one inform message describing each target of an access REF.
1017 WRITE is set for a write access and clear for a read access. */
1019 void
1020 access_ref::inform_access (access_mode mode, int ostype /* = 1 */) const
1022 const access_ref &aref = *this;
1023 if (!aref.ref)
1024 return;
1026 if (phi ())
1028 /* Set MAXREF to refer to the largest object and fill ALL_REFS
1029 with data for all objects referenced by the PHI arguments. */
1030 access_ref maxref;
1031 auto_vec<access_ref> all_refs;
1032 if (!get_ref (&all_refs, &maxref, ostype))
1033 return;
1035 if (all_refs.length ())
1037 /* Except for MAXREF, the rest of the arguments' offsets need not
1038 reflect one added to the PHI itself. Determine the latter from
1039 MAXREF on which the result is based. */
1040 const offset_int orng[] =
1042 offrng[0] - maxref.offrng[0],
1043 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
1046 /* Add the final PHI's offset to that of each of the arguments
1047 and recurse to issue an inform message for it. */
1048 for (unsigned i = 0; i != all_refs.length (); ++i)
1050 /* Skip any PHIs; those could lead to infinite recursion. */
1051 if (all_refs[i].phi ())
1052 continue;
1054 all_refs[i].add_offset (orng[0], orng[1]);
1055 all_refs[i].inform_access (mode, ostype);
1057 return;
1061 /* Convert offset range and avoid including a zero range since it
1062 isn't necessarily meaningful. */
1063 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
1064 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
1065 HOST_WIDE_INT minoff;
1066 HOST_WIDE_INT maxoff = diff_max;
1067 if (wi::fits_shwi_p (aref.offrng[0]))
1068 minoff = aref.offrng[0].to_shwi ();
1069 else
1070 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
1072 if (wi::fits_shwi_p (aref.offrng[1]))
1073 maxoff = aref.offrng[1].to_shwi ();
1075 if (maxoff <= diff_min || maxoff >= diff_max)
1076 /* Avoid mentioning an upper bound that's equal to or in excess
1077 of the maximum of ptrdiff_t. */
1078 maxoff = minoff;
1080 /* Convert size range and always include it since all sizes are
1081 meaningful. */
1082 unsigned long long minsize = 0, maxsize = 0;
1083 if (wi::fits_shwi_p (aref.sizrng[0])
1084 && wi::fits_shwi_p (aref.sizrng[1]))
1086 minsize = aref.sizrng[0].to_shwi ();
1087 maxsize = aref.sizrng[1].to_shwi ();
1090 /* SIZRNG doesn't necessarily have the same range as the allocation
1091 size determined by gimple_call_alloc_size (). */
1092 char sizestr[80];
1093 if (minsize == maxsize)
1094 sprintf (sizestr, "%llu", minsize);
1095 else
1096 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
1098 char offstr[80];
1099 if (minoff == 0
1100 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
1101 offstr[0] = '\0';
1102 else if (minoff == maxoff)
1103 sprintf (offstr, "%lli", (long long) minoff);
1104 else
1105 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
1107 location_t loc = UNKNOWN_LOCATION;
1109 tree ref = this->ref;
1110 tree allocfn = NULL_TREE;
1111 if (TREE_CODE (ref) == SSA_NAME)
1113 gimple *stmt = SSA_NAME_DEF_STMT (ref);
1114 if (!stmt)
1115 return;
1117 if (is_gimple_call (stmt))
1119 loc = gimple_location (stmt);
1120 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1122 /* Strip the SSA_NAME suffix from the variable name and
1123 recreate an identifier with the VLA's original name. */
1124 ref = gimple_call_lhs (stmt);
1125 if (SSA_NAME_IDENTIFIER (ref))
1127 ref = SSA_NAME_IDENTIFIER (ref);
1128 const char *id = IDENTIFIER_POINTER (ref);
1129 size_t len = strcspn (id, ".$");
1130 if (!len)
1131 len = strlen (id);
1132 ref = get_identifier_with_length (id, len);
1135 else
1137 /* Except for VLAs, retrieve the allocation function. */
1138 allocfn = gimple_call_fndecl (stmt);
1139 if (!allocfn)
1140 allocfn = gimple_call_fn (stmt);
1141 if (TREE_CODE (allocfn) == SSA_NAME)
1143 /* For an ALLOC_CALL via a function pointer make a small
1144 effort to determine the destination of the pointer. */
1145 gimple *def = SSA_NAME_DEF_STMT (allocfn);
1146 if (gimple_assign_single_p (def))
1148 tree rhs = gimple_assign_rhs1 (def);
1149 if (DECL_P (rhs))
1150 allocfn = rhs;
1151 else if (TREE_CODE (rhs) == COMPONENT_REF)
1152 allocfn = TREE_OPERAND (rhs, 1);
1157 else if (gimple_nop_p (stmt))
1158 /* Handle DECL_PARM below. */
1159 ref = SSA_NAME_VAR (ref);
1160 else if (is_gimple_assign (stmt)
1161 && (gimple_assign_rhs_code (stmt) == MIN_EXPR
1162 || gimple_assign_rhs_code (stmt) == MAX_EXPR))
1164 /* MIN or MAX_EXPR here implies a reference to a known object
1165 and either an unknown or distinct one (the latter being
1166 the result of an invalid relational expression). Determine
1167 the identity of the former and point to it in the note.
1168 TODO: Consider merging with PHI handling. */
1169 access_ref arg_ref[2];
1170 tree arg = gimple_assign_rhs1 (stmt);
1171 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
1172 arg = gimple_assign_rhs2 (stmt);
1173 compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);
1175 /* Use the argument that references a known object with more
1176 space remaining. */
1177 const bool idx
1178 = (!arg_ref[0].ref || !arg_ref[0].base0
1179 || (arg_ref[0].base0 && arg_ref[1].base0
1180 && (arg_ref[0].size_remaining ()
1181 < arg_ref[1].size_remaining ())));
1183 arg_ref[idx].offrng[0] = offrng[0];
1184 arg_ref[idx].offrng[1] = offrng[1];
1185 arg_ref[idx].inform_access (mode);
1186 return;
1190 if (DECL_P (ref))
1191 loc = DECL_SOURCE_LOCATION (ref);
1192 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
1193 loc = EXPR_LOCATION (ref);
1194 else if (TREE_CODE (ref) != IDENTIFIER_NODE
1195 && TREE_CODE (ref) != SSA_NAME)
1197 if (TREE_CODE (ref) == INTEGER_CST && ref_nullptr_p)
1199 if (mode == access_read_write || mode == access_write_only)
1200 inform (loc, "destination object is likely at address zero");
1201 else
1202 inform (loc, "source object is likely at address zero");
1204 return;
1207 if (mode == access_read_write || mode == access_write_only)
1209 if (allocfn == NULL_TREE)
1211 if (*offstr)
1212 inform (loc, "at offset %s into destination object %qE of size %s",
1213 offstr, ref, sizestr);
1214 else
1215 inform (loc, "destination object %qE of size %s", ref, sizestr);
1216 return;
1219 if (*offstr)
1220 inform (loc,
1221 "at offset %s into destination object of size %s "
1222 "allocated by %qE", offstr, sizestr, allocfn);
1223 else
1224 inform (loc, "destination object of size %s allocated by %qE",
1225 sizestr, allocfn);
1226 return;
1229 if (mode == access_read_only)
1231 if (allocfn == NULL_TREE)
1233 if (*offstr)
1234 inform (loc, "at offset %s into source object %qE of size %s",
1235 offstr, ref, sizestr);
1236 else
1237 inform (loc, "source object %qE of size %s", ref, sizestr);
1239 return;
1242 if (*offstr)
1243 inform (loc,
1244 "at offset %s into source object of size %s allocated by %qE",
1245 offstr, sizestr, allocfn);
1246 else
1247 inform (loc, "source object of size %s allocated by %qE",
1248 sizestr, allocfn);
1249 return;
1252 if (allocfn == NULL_TREE)
1254 if (*offstr)
1255 inform (loc, "at offset %s into object %qE of size %s",
1256 offstr, ref, sizestr);
1257 else
1258 inform (loc, "object %qE of size %s", ref, sizestr);
1260 return;
1263 if (*offstr)
1264 inform (loc,
1265 "at offset %s into object of size %s allocated by %qE",
1266 offstr, sizestr, allocfn);
1267 else
1268 inform (loc, "object of size %s allocated by %qE",
1269 sizestr, allocfn);
1272 /* Dump *THIS to FILE. */
1274 void
1275 access_ref::dump (FILE *file) const
1277 for (int i = deref; i < 0; ++i)
1278 fputc ('&', file);
1280 for (int i = 0; i < deref; ++i)
1281 fputc ('*', file);
1283 if (gphi *phi_stmt = phi ())
1285 fputs ("PHI <", file);
1286 unsigned nargs = gimple_phi_num_args (phi_stmt);
1287 for (unsigned i = 0; i != nargs; ++i)
1289 tree arg = gimple_phi_arg_def (phi_stmt, i);
1290 print_generic_expr (file, arg);
1291 if (i + 1 < nargs)
1292 fputs (", ", file);
1294 fputc ('>', file);
1296 else
1297 print_generic_expr (file, ref);
1299 if (offrng[0] != offrng[1])
1300 fprintf (file, " + [%lli, %lli]",
1301 (long long) offrng[0].to_shwi (),
1302 (long long) offrng[1].to_shwi ());
1303 else if (offrng[0] != 0)
1304 fprintf (file, " %c %lli",
1305 offrng[0] < 0 ? '-' : '+',
1306 (long long) offrng[0].to_shwi ());
1308 if (base0)
1309 fputs (" (base0)", file);
1311 fputs ("; size: ", file);
1312 if (sizrng[0] != sizrng[1])
1314 offset_int maxsize = wi::to_offset (max_object_size ());
1315 if (sizrng[0] == 0 && sizrng[1] >= maxsize)
1316 fputs ("unknown", file);
1317 else
1318 fprintf (file, "[%llu, %llu]",
1319 (unsigned long long) sizrng[0].to_uhwi (),
1320 (unsigned long long) sizrng[1].to_uhwi ());
1322 else if (sizrng[0] != 0)
1323 fprintf (file, "%llu",
1324 (unsigned long long) sizrng[0].to_uhwi ());
1326 fputc ('\n', file);
1329 /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
1330 when MINWRITE or MINREAD, respectively, is set. */
1331 access_data::access_data (range_query *query, gimple *stmt, access_mode mode,
1332 tree maxwrite /* = NULL_TREE */,
1333 bool minwrite /* = false */,
1334 tree maxread /* = NULL_TREE */,
1335 bool minread /* = false */)
1336 : stmt (stmt), call (), dst (), src (), mode (mode), ostype ()
1338 set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
1339 set_bound (src_bndrng, maxread, minread, query, stmt);
1342 /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
1343 when MINWRITE or MINREAD, respectively, is set. */
1344 access_data::access_data (range_query *query, tree expr, access_mode mode,
1345 tree maxwrite /* = NULL_TREE */,
1346 bool minwrite /* = false */,
1347 tree maxread /* = NULL_TREE */,
1348 bool minread /* = false */)
1349 : stmt (), call (expr), dst (), src (), mode (mode), ostype ()
1351 set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
1352 set_bound (src_bndrng, maxread, minread, query, stmt);
1355 /* Set BNDRNG to the range of BOUND for the statement STMT. */
1357 void
1358 access_data::set_bound (offset_int bndrng[2], tree bound, bool minaccess,
1359 range_query *query, gimple *stmt)
1361 /* Set the default bounds of the access and adjust below. */
1362 bndrng[0] = minaccess ? 1 : 0;
1363 bndrng[1] = HOST_WIDE_INT_M1U;
1365 /* When BOUND is nonnull and a range can be extracted from it,
1366 set the bounds of the access to reflect both it and MINACCESS.
1367 BNDRNG[0] is the size of the minimum access. */
1368 tree rng[2];
1369 if (bound && get_size_range (query, bound, stmt, rng, SR_ALLOW_ZERO))
1371 bndrng[0] = wi::to_offset (rng[0]);
1372 bndrng[1] = wi::to_offset (rng[1]);
1373 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
1377 /* Set a bit for the PHI in VISITED and return true if it wasn't
1378 already set. */
1380 bool
1381 ssa_name_limit_t::visit_phi (tree ssa_name)
1383 if (!visited)
1384 visited = BITMAP_ALLOC (NULL);
1386 /* Return false if SSA_NAME has already been visited. */
1387 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
1390 /* Clear a bit for the PHI in VISITED. */
1392 void
1393 ssa_name_limit_t::leave_phi (tree ssa_name)
1395 /* Return false if SSA_NAME has already been visited. */
1396 bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
1399 /* Return false if the SSA_NAME chain length counter has reached
1400 the limit, otherwise increment the counter and return true. */
1402 bool
1403 ssa_name_limit_t::next ()
1405 /* Return a negative value to let caller avoid recursing beyond
1406 the specified limit. */
1407 if (ssa_def_max == 0)
1408 return false;
1410 --ssa_def_max;
1411 return true;
1414 /* If the SSA_NAME has already been "seen" return a positive value.
1415 Otherwise add it to VISITED. If the SSA_NAME limit has been
1416 reached, return a negative value. Otherwise return zero. */
1419 ssa_name_limit_t::next_phi (tree ssa_name)
1422 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1423 /* Return a positive value if the PHI has already been visited. */
1424 if (gimple_code (def_stmt) == GIMPLE_PHI
1425 && !visit_phi (ssa_name))
1426 return 1;
1429 /* Return a negative value to let caller avoid recursing beyond
1430 the specified limit. */
1431 if (ssa_def_max == 0)
1432 return -1;
1434 --ssa_def_max;
1436 return 0;
1439 ssa_name_limit_t::~ssa_name_limit_t ()
1441 if (visited)
1442 BITMAP_FREE (visited);
1445 /* Default ctor. Initialize object with pointers to the range_query
1446 instance to use or null. */
1448 pointer_query::pointer_query (range_query *qry /* = NULL */)
1449 : rvals (qry), hits (), misses (), failures (), depth (), max_depth (),
1450 var_cache ()
1452 /* No op. */
1455 /* Return a pointer to the cached access_ref instance for the SSA_NAME
1456 PTR if it's there or null otherwise. */
1458 const access_ref *
1459 pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1461 unsigned version = SSA_NAME_VERSION (ptr);
1462 unsigned idx = version << 1 | (ostype & 1);
1463 if (var_cache.indices.length () <= idx)
1465 ++misses;
1466 return NULL;
1469 unsigned cache_idx = var_cache.indices[idx];
1470 if (var_cache.access_refs.length () <= cache_idx)
1472 ++misses;
1473 return NULL;
1476 const access_ref &cache_ref = var_cache.access_refs[cache_idx];
1477 if (cache_ref.ref)
1479 ++hits;
1480 return &cache_ref;
1483 ++misses;
1484 return NULL;
1487 /* Retrieve the access_ref instance for a variable from the cache if it's
1488 there or compute it and insert it into the cache if it's nonnonull. */
1490 bool
1491 pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref,
1492 int ostype /* = 1 */)
1494 const unsigned version
1495 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1497 if (version)
1499 unsigned idx = version << 1 | (ostype & 1);
1500 if (idx < var_cache.indices.length ())
1502 unsigned cache_idx = var_cache.indices[idx] - 1;
1503 if (cache_idx < var_cache.access_refs.length ()
1504 && var_cache.access_refs[cache_idx].ref)
1506 ++hits;
1507 *pref = var_cache.access_refs[cache_idx];
1508 return true;
1512 ++misses;
1515 if (!compute_objsize (ptr, stmt, ostype, pref, this))
1517 ++failures;
1518 return false;
1521 return true;
1524 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1525 nonnull. */
1527 void
1528 pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
1530 /* Only add populated/valid entries. */
1531 if (!ref.ref || ref.sizrng[0] < 0)
1532 return;
1534 /* Add REF to the two-level cache. */
1535 unsigned version = SSA_NAME_VERSION (ptr);
1536 unsigned idx = version << 1 | (ostype & 1);
1538 /* Grow INDICES if necessary. An index is valid if it's nonzero.
1539 Its value minus one is the index into ACCESS_REFS. Not all
1540 entries are valid. */
1541 if (var_cache.indices.length () <= idx)
1542 var_cache.indices.safe_grow_cleared (idx + 1);
1544 if (!var_cache.indices[idx])
1545 var_cache.indices[idx] = var_cache.access_refs.length () + 1;
1547 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
1548 REF member is nonnull. All entries except for the last two
1549 are valid. Once nonnull, the REF value must stay unchanged. */
1550 unsigned cache_idx = var_cache.indices[idx];
1551 if (var_cache.access_refs.length () <= cache_idx)
1552 var_cache.access_refs.safe_grow_cleared (cache_idx + 1);
1554 access_ref &cache_ref = var_cache.access_refs[cache_idx];
1555 if (cache_ref.ref)
1557 gcc_checking_assert (cache_ref.ref == ref.ref);
1558 return;
1561 cache_ref = ref;
1564 /* Flush the cache if it's nonnull. */
1566 void
1567 pointer_query::flush_cache ()
1569 var_cache.indices.release ();
1570 var_cache.access_refs.release ();
1573 /* Dump statistics and, optionally, cache contents to DUMP_FILE. */
1575 void
1576 pointer_query::dump (FILE *dump_file, bool contents /* = false */)
1578 unsigned nused = 0, nrefs = 0;
1579 unsigned nidxs = var_cache.indices.length ();
1580 for (unsigned i = 0; i != nidxs; ++i)
1582 unsigned ari = var_cache.indices[i];
1583 if (!ari)
1584 continue;
1586 ++nused;
1588 const access_ref &aref = var_cache.access_refs[ari];
1589 if (!aref.ref)
1590 continue;
1592 ++nrefs;
1595 fprintf (dump_file, "pointer_query counters:\n"
1596 " index cache size: %u\n"
1597 " index entries: %u\n"
1598 " access cache size: %u\n"
1599 " access entries: %u\n"
1600 " hits: %u\n"
1601 " misses: %u\n"
1602 " failures: %u\n"
1603 " max_depth: %u\n",
1604 nidxs, nused,
1605 var_cache.access_refs.length (), nrefs,
1606 hits, misses, failures, max_depth);
1608 if (!contents || !nidxs)
1609 return;
1611 fputs ("\npointer_query cache contents:\n", dump_file);
1613 for (unsigned i = 0; i != nidxs; ++i)
1615 unsigned ari = var_cache.indices[i];
1616 if (!ari)
1617 continue;
1619 const access_ref &aref = var_cache.access_refs[ari];
1620 if (!aref.ref)
1621 continue;
1623 /* The level-1 cache index corresponds to the SSA_NAME_VERSION
1624 shifted left by one and ORed with the Object Size Type in
1625 the lowest bit. Print the two separately. */
1626 unsigned ver = i >> 1;
1627 unsigned ost = i & 1;
1629 fprintf (dump_file, " %u.%u[%u]: ", ver, ost, ari);
1630 if (tree name = ssa_name (ver))
1632 print_generic_expr (dump_file, name);
1633 fputs (" = ", dump_file);
1635 else
1636 fprintf (dump_file, " _%u = ", ver);
1638 aref.dump (dump_file);
1641 fputc ('\n', dump_file);
1644 /* A helper of compute_objsize_r() to determine the size from an assignment
1645 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. On success
1646 set PREF->REF to the operand with more or less space remaining,
1647 respectively, if both refer to the same (sub)object, or to PTR if they
1648 might not, and return true. Otherwise, if the identity of neither
1649 operand can be determined, return false. */
1651 static bool
1652 handle_min_max_size (tree ptr, int ostype, access_ref *pref,
1653 ssa_name_limit_t &snlim, pointer_query *qry)
1655 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
1656 const tree_code code = gimple_assign_rhs_code (stmt);
1658 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
1659 Determine the size/offset of each and use the one with more or less
1660 space remaining, respectively. If either fails, use the information
1661 determined from the other instead, adjusted up or down as appropriate
1662 for the expression. */
1663 access_ref aref[2] = { *pref, *pref };
1664 tree arg1 = gimple_assign_rhs1 (stmt);
1665 if (!compute_objsize_r (arg1, stmt, false, ostype, &aref[0], snlim, qry))
1667 aref[0].base0 = false;
1668 aref[0].offrng[0] = aref[0].offrng[1] = 0;
1669 aref[0].add_max_offset ();
1670 aref[0].set_max_size_range ();
1673 tree arg2 = gimple_assign_rhs2 (stmt);
1674 if (!compute_objsize_r (arg2, stmt, false, ostype, &aref[1], snlim, qry))
1676 aref[1].base0 = false;
1677 aref[1].offrng[0] = aref[1].offrng[1] = 0;
1678 aref[1].add_max_offset ();
1679 aref[1].set_max_size_range ();
1682 if (!aref[0].ref && !aref[1].ref)
1683 /* Fail if the identity of neither argument could be determined. */
1684 return false;
1686 bool i0 = false;
1687 if (aref[0].ref && aref[0].base0)
1689 if (aref[1].ref && aref[1].base0)
1691 /* If the object referenced by both arguments has been determined
1692 set *PREF to the one with more or less space remainng, whichever
1693 is appopriate for CODE.
1694 TODO: Indicate when the objects are distinct so it can be
1695 diagnosed. */
1696 i0 = code == MAX_EXPR;
1697 const bool i1 = !i0;
1699 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
1700 *pref = aref[i1];
1701 else
1702 *pref = aref[i0];
1704 if (aref[i0].ref != aref[i1].ref)
1705 /* If the operands don't refer to the same (sub)object set
1706 PREF->REF to the SSA_NAME from which STMT was obtained
1707 so that both can be identified in a diagnostic. */
1708 pref->ref = ptr;
1710 return true;
1713 /* If only the object referenced by one of the arguments could be
1714 determined, use it and... */
1715 *pref = aref[0];
1716 i0 = true;
1718 else
1719 *pref = aref[1];
1721 const bool i1 = !i0;
1722 /* ...see if the offset obtained from the other pointer can be used
1723 to tighten up the bound on the offset obtained from the first. */
1724 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
1725 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
1727 pref->offrng[0] = aref[i0].offrng[0];
1728 pref->offrng[1] = aref[i0].offrng[1];
1731 /* Replace PTR->REF with the SSA_NAME to indicate the expression
1732 might not refer to the same (sub)object. */
1733 pref->ref = ptr;
1734 return true;
1737 /* A helper of compute_objsize_r() to determine the size of a DECL.
1738 Return true on success and (possibly in the future) false on failure. */
1740 static bool
1741 handle_decl (tree decl, bool addr, access_ref *pref)
1743 tree decl_type = TREE_TYPE (decl);
1745 pref->ref = decl;
1747 /* Reset the offset in case it was set by a prior call and not
1748 cleared by the caller. The offset is only adjusted after
1749 the identity of the object has been determined. */
1750 pref->offrng[0] = pref->offrng[1] = 0;
1752 if (!addr && POINTER_TYPE_P (decl_type))
1754 /* Set the maximum size if the reference is to the pointer
1755 itself (as opposed to what it points to), and clear
1756 BASE0 since the offset isn't necessarily zero-based. */
1757 pref->set_max_size_range ();
1758 pref->base0 = false;
1759 return true;
1762 /* Valid offsets into the object are nonnegative. */
1763 pref->base0 = true;
1765 if (tree size = decl_init_size (decl, false))
1766 if (TREE_CODE (size) == INTEGER_CST)
1768 pref->sizrng[0] = wi::to_offset (size);
1769 pref->sizrng[1] = pref->sizrng[0];
1770 return true;
1773 pref->set_max_size_range ();
1774 return true;
1777 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
1778 AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
1779 on success and false on failure. */
1781 static bool
1782 handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype,
1783 access_ref *pref, ssa_name_limit_t &snlim,
1784 pointer_query *qry)
1786 gcc_assert (TREE_CODE (aref) == ARRAY_REF);
1788 tree arefop = TREE_OPERAND (aref, 0);
1789 tree reftype = TREE_TYPE (arefop);
1790 if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
1791 /* Avoid arrays of pointers. FIXME: Hande pointers to arrays
1792 of known bound. */
1793 return false;
1795 if (!compute_objsize_r (arefop, stmt, addr, ostype, pref, snlim, qry))
1796 return false;
1798 offset_int orng[2];
1799 tree off = pref->eval (TREE_OPERAND (aref, 1));
1800 range_query *const rvals = qry ? qry->rvals : NULL;
1801 if (!get_offset_range (off, stmt, orng, rvals))
1803 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1804 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1805 orng[0] = -orng[1] - 1;
1808 /* Convert the array index range determined above to a byte offset. */
1809 tree lowbnd = array_ref_low_bound (aref);
1810 if (TREE_CODE (lowbnd) == INTEGER_CST && !integer_zerop (lowbnd))
1812 /* Adjust the index by the low bound of the array domain (0 in C/C++,
1813 1 in Fortran and anything in Ada) by applying the same processing
1814 as in get_offset_range. */
1815 const wide_int wlb = wi::to_wide (lowbnd);
1816 signop sgn = SIGNED;
1817 if (TYPE_UNSIGNED (TREE_TYPE (lowbnd))
1818 && wlb.get_precision () < TYPE_PRECISION (sizetype))
1819 sgn = UNSIGNED;
1820 const offset_int lb = offset_int::from (wlb, sgn);
1821 orng[0] -= lb;
1822 orng[1] -= lb;
1825 tree eltype = TREE_TYPE (aref);
1826 tree tpsize = TYPE_SIZE_UNIT (eltype);
1827 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
1829 pref->add_max_offset ();
1830 return true;
1833 offset_int sz = wi::to_offset (tpsize);
1834 orng[0] *= sz;
1835 orng[1] *= sz;
1837 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
1839 /* Except for the permissive raw memory functions which use
1840 the size of the whole object determined above, use the size
1841 of the referenced array. Because the overall offset is from
1842 the beginning of the complete array object add this overall
1843 offset to the size of array. */
1844 offset_int sizrng[2] =
1846 pref->offrng[0] + orng[0] + sz,
1847 pref->offrng[1] + orng[1] + sz
1849 if (sizrng[1] < sizrng[0])
1850 std::swap (sizrng[0], sizrng[1]);
1851 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
1852 pref->sizrng[0] = sizrng[0];
1853 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
1854 pref->sizrng[1] = sizrng[1];
1857 pref->add_offset (orng[0], orng[1]);
1858 return true;
1861 /* Given a COMPONENT_REF CREF, set *PREF size to the size of the referenced
1862 member. */
1864 static void
1865 set_component_ref_size (tree cref, access_ref *pref)
1867 const tree base = TREE_OPERAND (cref, 0);
1868 const tree base_type = TREE_TYPE (base);
1870 /* SAM is set for array members that might need special treatment. */
1871 special_array_member sam;
1872 tree size = component_ref_size (cref, &sam);
1873 if (sam == special_array_member::int_0)
1874 pref->sizrng[0] = pref->sizrng[1] = 0;
1875 else if (!pref->trail1special && sam == special_array_member::trail_1)
1876 pref->sizrng[0] = pref->sizrng[1] = 1;
1877 else if (size && TREE_CODE (size) == INTEGER_CST)
1878 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1879 else
1881 /* When the size of the member is unknown it's either a flexible
1882 array member or a trailing special array member (either zero
1883 length or one-element). Set the size to the maximum minus
1884 the constant size of the base object's type. */
1885 pref->sizrng[0] = 0;
1886 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1887 if (tree base_size = TYPE_SIZE_UNIT (base_type))
1888 if (TREE_CODE (base_size) == INTEGER_CST)
1889 pref->sizrng[1] -= wi::to_offset (base_size);
1893 /* A helper of compute_objsize_r() to determine the size from COMPONENT_REF
1894 CREF. Return true on success and false on failure. */
1896 static bool
1897 handle_component_ref (tree cref, gimple *stmt, bool addr, int ostype,
1898 access_ref *pref, ssa_name_limit_t &snlim,
1899 pointer_query *qry)
1901 gcc_assert (TREE_CODE (cref) == COMPONENT_REF);
1903 const tree base = TREE_OPERAND (cref, 0);
1904 const tree field = TREE_OPERAND (cref, 1);
1905 access_ref base_ref = *pref;
1907 /* Unconditionally determine the size of the base object (it could
1908 be smaller than the referenced member when the object is stored
1909 in a buffer with an insufficient size). */
1910 if (!compute_objsize_r (base, stmt, addr, 0, &base_ref, snlim, qry))
1911 return false;
1913 /* Add the offset of the member to the offset into the object computed
1914 so far. */
1915 tree offset = byte_position (field);
1916 if (TREE_CODE (offset) == INTEGER_CST)
1917 base_ref.add_offset (wi::to_offset (offset));
1918 else
1919 base_ref.add_max_offset ();
1921 if (!base_ref.ref)
1922 /* PREF->REF may have been already set to an SSA_NAME earlier
1923 to provide better context for diagnostics. In that case,
1924 leave it unchanged. */
1925 base_ref.ref = base;
1927 const tree base_type = TREE_TYPE (base);
1928 if (TREE_CODE (base_type) == UNION_TYPE)
1929 /* In accesses through union types consider the entire unions
1930 rather than just their members. */
1931 ostype = 0;
1933 if (ostype == 0)
1935 /* In OSTYPE zero (for raw memory functions like memcpy), use
1936 the maximum size instead if the identity of the enclosing
1937 object cannot be determined. */
1938 *pref = base_ref;
1939 return true;
1942 pref->ref = field;
1944 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
1946 /* Set maximum size if the reference is to the pointer member
1947 itself (as opposed to what it points to). */
1948 pref->set_max_size_range ();
1949 return true;
1952 set_component_ref_size (cref, pref);
1954 if (base_ref.size_remaining () < pref->size_remaining ())
1955 /* Use the base object if it's smaller than the member. */
1956 *pref = base_ref;
1958 return true;
1961 /* A helper of compute_objsize_r() to determine the size from MEM_REF
1962 MREF. Return true on success and false on failure. */
1964 static bool
1965 handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref,
1966 ssa_name_limit_t &snlim, pointer_query *qry)
1968 gcc_assert (TREE_CODE (mref) == MEM_REF);
1970 tree mreftype = TYPE_MAIN_VARIANT (TREE_TYPE (mref));
1971 if (VECTOR_TYPE_P (mreftype))
1973 /* Hack: Handle MEM_REFs of vector types as those to complete
1974 objects; those may be synthesized from multiple assignments
1975 to consecutive data members (see PR 93200 and 96963).
1976 FIXME: Vectorized assignments should only be present after
1977 vectorization so this hack is only necessary after it has
1978 run and could be avoided in calls from prior passes (e.g.,
1979 tree-ssa-strlen.cc).
1980 FIXME: Deal with this more generally, e.g., by marking up
1981 such MEM_REFs at the time they're created. */
1982 ostype = 0;
1985 tree mrefop = TREE_OPERAND (mref, 0);
1986 if (!compute_objsize_r (mrefop, stmt, false, ostype, pref, snlim, qry))
1987 return false;
1989 ++pref->deref;
1991 offset_int orng[2];
1992 tree off = pref->eval (TREE_OPERAND (mref, 1));
1993 range_query *const rvals = qry ? qry->rvals : NULL;
1994 if (!get_offset_range (off, stmt, orng, rvals))
1996 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1997 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1998 orng[0] = -orng[1] - 1;
2001 pref->add_offset (orng[0], orng[1]);
2002 return true;
2005 /* A helper of compute_objsize_r() to determine the size from SSA_NAME
2006 PTR. Return true on success and false on failure. */
2008 static bool
2009 handle_ssa_name (tree ptr, bool addr, int ostype,
2010 access_ref *pref, ssa_name_limit_t &snlim,
2011 pointer_query *qry)
2013 if (!snlim.next ())
2014 return false;
2016 /* Only process an SSA_NAME if the recursion limit has not yet
2017 been reached. */
2018 if (qry)
2020 if (++qry->depth > qry->max_depth)
2021 qry->max_depth = qry->depth;
2022 if (const access_ref *cache_ref = qry->get_ref (ptr, ostype))
2024 /* Add the number of DEREFerences accummulated so far. */
2025 const int deref = pref->deref;
2026 *pref = *cache_ref;
2027 pref->deref += deref;
2028 return true;
2032 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
2033 if (is_gimple_call (stmt))
2035 /* If STMT is a call to an allocation function get the size
2036 from its argument(s). If successful, also set *PREF->REF
2037 to PTR for the caller to include in diagnostics. */
2038 wide_int wr[2];
2039 range_query *const rvals = qry ? qry->rvals : NULL;
2040 if (gimple_call_alloc_size (stmt, wr, rvals))
2042 pref->ref = ptr;
2043 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
2044 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
2045 /* Constrain both bounds to a valid size. */
2046 offset_int maxsize = wi::to_offset (max_object_size ());
2047 if (pref->sizrng[0] > maxsize)
2048 pref->sizrng[0] = maxsize;
2049 if (pref->sizrng[1] > maxsize)
2050 pref->sizrng[1] = maxsize;
2052 else
2054 /* For functions known to return one of their pointer arguments
2055 try to determine what the returned pointer points to, and on
2056 success add OFFRNG which was set to the offset added by
2057 the function (e.g., memchr) to the overall offset. */
2058 bool past_end;
2059 offset_int offrng[2];
2060 if (tree ret = gimple_call_return_array (stmt, offrng, &past_end,
2061 snlim, qry))
2063 if (!compute_objsize_r (ret, stmt, addr, ostype, pref, snlim, qry))
2064 return false;
2066 /* Cap OFFRNG[1] to at most the remaining size of
2067 the object. */
2068 offset_int remrng[2];
2069 remrng[1] = pref->size_remaining (remrng);
2070 if (remrng[1] != 0 && !past_end)
2071 /* Decrement the size for functions that never return
2072 a past-the-end pointer. */
2073 remrng[1] -= 1;
2075 if (remrng[1] < offrng[1])
2076 offrng[1] = remrng[1];
2077 pref->add_offset (offrng[0], offrng[1]);
2079 else
2081 /* For other calls that might return arbitrary pointers
2082 including into the middle of objects set the size
2083 range to maximum, clear PREF->BASE0, and also set
2084 PREF->REF to include in diagnostics. */
2085 pref->set_max_size_range ();
2086 pref->base0 = false;
2087 pref->ref = ptr;
2090 qry->put_ref (ptr, *pref, ostype);
2091 return true;
2094 if (gimple_nop_p (stmt))
2096 /* For a function argument try to determine the byte size
2097 of the array from the current function declaratation
2098 (e.g., attribute access or related). */
2099 wide_int wr[2];
2100 bool static_array = false;
2101 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
2103 pref->parmarray = !static_array;
2104 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
2105 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
2106 pref->ref = ref;
2107 qry->put_ref (ptr, *pref, ostype);
2108 return true;
2111 pref->set_max_size_range ();
2112 pref->base0 = false;
2113 pref->ref = ptr;
2114 qry->put_ref (ptr, *pref, ostype);
2115 return true;
2118 if (gimple_code (stmt) == GIMPLE_PHI)
2120 /* Pass PTR to get_ref() via PREF. If all PHI arguments refer
2121 to the same object the function will replace it with it. */
2122 pref->ref = ptr;
2123 access_ref phi_ref = *pref;
2124 if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
2125 return false;
2126 *pref = phi_ref;
2127 qry->put_ref (ptr, *pref, ostype);
2128 return true;
2131 if (!is_gimple_assign (stmt))
2133 /* Clear BASE0 since the assigned pointer might point into
2134 the middle of the object, set the maximum size range and,
2135 if the SSA_NAME refers to a function argumnent, set
2136 PREF->REF to it. */
2137 pref->base0 = false;
2138 pref->set_max_size_range ();
2139 pref->ref = ptr;
2140 return true;
2143 tree_code code = gimple_assign_rhs_code (stmt);
2145 if (code == MAX_EXPR || code == MIN_EXPR)
2147 if (!handle_min_max_size (ptr, ostype, pref, snlim, qry))
2148 return false;
2150 qry->put_ref (ptr, *pref, ostype);
2151 return true;
2154 tree rhs = gimple_assign_rhs1 (stmt);
2156 if (code == POINTER_PLUS_EXPR
2157 && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
2159 /* Compute the size of the object first. */
2160 if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
2161 return false;
2163 offset_int orng[2];
2164 tree off = gimple_assign_rhs2 (stmt);
2165 range_query *const rvals = qry ? qry->rvals : NULL;
2166 if (get_offset_range (off, stmt, orng, rvals))
2167 pref->add_offset (orng[0], orng[1]);
2168 else
2169 pref->add_max_offset ();
2171 qry->put_ref (ptr, *pref, ostype);
2172 return true;
2175 if (code == ADDR_EXPR || code == SSA_NAME)
2177 if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
2178 return false;
2179 qry->put_ref (ptr, *pref, ostype);
2180 return true;
2183 if (ostype > 1 && POINTER_TYPE_P (TREE_TYPE (rhs)))
2185 /* When determining the qualifiers follow the pointer but
2186 avoid caching the result. As the pointer is added to
2187 and/or dereferenced the computed size and offset need
2188 not be meaningful for other queries involving the same
2189 pointer. */
2190 if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
2191 return false;
2193 rhs = pref->ref;
2196 /* (This could also be an assignment from a nonlocal pointer.) Save
2197 PTR to mention in diagnostics but otherwise treat it as a pointer
2198 to an unknown object. */
2199 pref->ref = rhs;
2200 pref->base0 = false;
2201 pref->set_max_size_range ();
2202 return true;
/* Helper to compute the size of the object referenced by the PTR
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).
   On success, sets PREF->REF to the DECL of the referenced object
   if it's unique, otherwise to null, PREF->OFFRNG to the range of
   offsets into it, and PREF->SIZRNG to the range of sizes of
   the object(s).
   ADDR is true for an enclosing ADDR_EXPR.
   SNLIM is used to avoid visiting the same PHI operand multiple
   times, and, when nonnull, RVALS to determine range information.
   Returns true on success, false when a meaningful size (or range)
   cannot be determined.

   The function is intended for diagnostics and should not be used
   to influence code generation or optimization.  */

static bool
compute_objsize_r (tree ptr, gimple *stmt, bool addr, int ostype,
		   access_ref *pref, ssa_name_limit_t &snlim,
		   pointer_query *qry)
{
  STRIP_NOPS (ptr);

  if (DECL_P (ptr))
    return handle_decl (ptr, addr, pref);

  switch (TREE_CODE (ptr))
    {
    case ADDR_EXPR:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, true, ostype, pref, snlim, qry))
	  return false;

	/* Taking the address undoes one level of indirection.  */
	--pref->deref;
	return true;
      }

    case BIT_FIELD_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* Operand 2 is the bit position of the field; convert it
	   to a byte offset into the underlying object.  */
	offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
	pref->add_offset (off / BITS_PER_UNIT);
	return true;
      }

    case ARRAY_REF:
      return handle_array_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case COMPONENT_REF:
      return handle_component_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case MEM_REF:
      return handle_mem_ref (ptr, stmt, ostype, pref, snlim, qry);

    case TARGET_MEM_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* TODO: Handle remaining operands.  Until then, add maximum offset.  */
	pref->ref = ptr;
	pref->add_max_offset ();
	return true;
      }

    case INTEGER_CST:
      /* Pointer constants other than null smaller than param_min_pagesize
	 might be the result of erroneous null pointer addition/subtraction.
	 Unless zero is a valid address set size to zero.  For null pointers,
	 set size to the maximum for now since those may be the result of
	 jump threading.  Similarly, for values >= param_min_pagesize in
	 order to support (type *) 0x7cdeab00.  */
      if (integer_zerop (ptr)
	  || wi::to_widest (ptr) >= param_min_pagesize)
	pref->set_max_size_range ();
      else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  /* On targets where address zero is dereferenceable (e.g. some
	     embedded address spaces) a small constant may be valid.  */
	  tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
	  addr_space_t as = TYPE_ADDR_SPACE (deref_type);
	  if (targetm.addr_space.zero_address_valid (as))
	    pref->set_max_size_range ();
	  else
	    {
	      pref->sizrng[0] = pref->sizrng[1] = 0;
	      pref->ref_nullptr_p = true;
	    }
	}
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;

      pref->ref = ptr;
      return true;

    case STRING_CST:
      /* TREE_STRING_LENGTH is the size in bytes of the constant,
	 including any terminating nul the front end added.  */
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      pref->ref = ptr;
      return true;

    case POINTER_PLUS_EXPR:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* The below only makes sense if the offset is being applied to the
	   address of the object.  */
	if (pref->deref != -1)
	  return false;

	offset_int orng[2];
	tree off = pref->eval (TREE_OPERAND (ptr, 1));
	if (get_offset_range (off, stmt, orng, qry->rvals))
	  pref->add_offset (orng[0], orng[1]);
	else
	  pref->add_max_offset ();
	return true;
      }

    case VIEW_CONVERT_EXPR:
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize_r (ptr, stmt, addr, ostype, pref, snlim, qry);

    case SSA_NAME:
      return handle_ssa_name (ptr, addr, ostype, pref, snlim, qry);

    default:
      break;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->ref = ptr;
  pref->base0 = false;
  pref->set_max_size_range ();
  if (TREE_CODE (ptr) == SSA_NAME)
    qry->put_ref (ptr, *pref);
  return true;
}
2349 /* A "public" wrapper around the above. Clients should use this overload
2350 instead. */
2352 tree
2353 compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2354 pointer_query *ptr_qry)
2356 pointer_query qry;
2357 if (ptr_qry)
2358 ptr_qry->depth = 0;
2359 else
2360 ptr_qry = &qry;
2362 /* Clear and invalidate in case *PREF is being reused. */
2363 pref->offrng[0] = pref->offrng[1] = 0;
2364 pref->sizrng[0] = pref->sizrng[1] = -1;
2366 ssa_name_limit_t snlim;
2367 if (!compute_objsize_r (ptr, stmt, false, ostype, pref, snlim, ptr_qry))
2368 return NULL_TREE;
2370 offset_int maxsize = pref->size_remaining ();
2371 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
2372 pref->offrng[0] = 0;
2373 return wide_int_to_tree (sizetype, maxsize);
2376 /* Transitional wrapper. The function should be removed once callers
2377 transition to the pointer_query API. */
2379 tree
2380 compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
2381 range_query *rvals /* = NULL */)
2383 pointer_query qry;
2384 qry.rvals = rvals;
2385 return compute_objsize (ptr, stmt, ostype, pref, &qry);
2388 /* Legacy wrapper around the above. The function should be removed
2389 once callers transition to one of the two above. */
2391 tree
2392 compute_objsize (tree ptr, gimple *stmt, int ostype, tree *pdecl /* = NULL */,
2393 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2395 /* Set the initial offsets to zero and size to negative to indicate
2396 none has been computed yet. */
2397 access_ref ref;
2398 tree size = compute_objsize (ptr, stmt, ostype, &ref, rvals);
2399 if (!size || !ref.base0)
2400 return NULL_TREE;
2402 if (pdecl)
2403 *pdecl = ref.ref;
2405 if (poff)
2406 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
2408 return size;
/* Determine the offset *FLDOFF of the first byte of a struct member
   of TYPE (possibly recursively) into which the byte offset OFF points,
   starting after the field START_AFTER if it's non-null.  On success,
   if nonnull, set *FLDOFF to the offset of the first byte, and return
   the field decl.  If nonnull, set *NEXTOFF to the offset of the next
   field (which reflects any padding between the returned field and
   the next).  Otherwise, if no such member can be found, return null.  */

tree
field_at_offset (tree type, tree start_after, HOST_WIDE_INT off,
		 HOST_WIDE_INT *fldoff /* = nullptr */,
		 HOST_WIDE_INT *nextoff /* = nullptr */)
{
  tree first_fld = TYPE_FIELDS (type);

  /* Provide local backing stores so the out-parameters can be
     dereferenced unconditionally below.  */
  HOST_WIDE_INT offbuf = 0, nextbuf = 0;
  if (!fldoff)
    fldoff = &offbuf;
  if (!nextoff)
    nextoff = &nextbuf;

  *nextoff = 0;

  /* The field to return.  */
  tree last_fld = NULL_TREE;
  /* The next field to advance to.  */
  tree next_fld = NULL_TREE;

  /* NEXT_FLD's cached offset.  */
  HOST_WIDE_INT next_pos = -1;

  for (tree fld = first_fld; fld; fld = next_fld)
    {
      next_fld = fld;
      do
	/* Advance to the next relevant data member.  */
	next_fld = TREE_CHAIN (next_fld);
      while (next_fld
	     && (TREE_CODE (next_fld) != FIELD_DECL
		 || DECL_ARTIFICIAL (next_fld)));

      /* Skip artificial and non-FIELD_DECL entries on the field chain.  */
      if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld))
	continue;

      if (fld == start_after)
	continue;

      tree fldtype = TREE_TYPE (fld);
      /* The offset of FLD within its immediately enclosing structure.  */
      HOST_WIDE_INT fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos;

      tree typesize = TYPE_SIZE_UNIT (fldtype);
      if (typesize && TREE_CODE (typesize) != INTEGER_CST)
	/* Bail if FLD is a variable length member.  */
	return NULL_TREE;

      /* If the size is not available the field is a flexible array
	 member.  Treat this case as success by using OFF itself as
	 the size so the bounds test below passes.  */
      HOST_WIDE_INT fldsize = (tree_fits_uhwi_p (typesize)
			       ? tree_to_uhwi (typesize)
			       : off);

      /* If OFF is beyond the end of the current field continue.  */
      HOST_WIDE_INT fldend = fldpos + fldsize;
      if (fldend < off)
	continue;

      if (next_fld)
	{
	  /* If OFF is equal to the offset of the next field continue
	     to it and skip the array/struct business below.  */
	  tree pos = byte_position (next_fld);
	  if (!tree_fits_shwi_p (pos))
	    /* Bail if NEXT_FLD is a variable length member.  */
	    return NULL_TREE;
	  next_pos = tree_to_shwi (pos);
	  *nextoff = *fldoff + next_pos;
	  if (*nextoff == off && TREE_CODE (type) != UNION_TYPE)
	    continue;
	}
      else
	*nextoff = HOST_WIDE_INT_MAX;

      /* OFF refers somewhere into the current field or just past its end,
	 which could mean it refers to the next field.  */
      if (TREE_CODE (fldtype) == ARRAY_TYPE)
	{
	  /* Will be set to the offset of the first byte of the array
	     element (which may be an array) of FLDTYPE into which
	     OFF - FLDPOS points (which may be past ELTOFF).  */
	  HOST_WIDE_INT eltoff = 0;
	  if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff))
	    fldtype = ft;
	  else
	    continue;

	  /* Advance the position to include the array element above.
	     If OFF - FLDPOS refers to a member of FLDTYPE, the member
	     will be determined below.  */
	  fldpos += eltoff;
	}

      *fldoff += fldpos;

      if (TREE_CODE (fldtype) == RECORD_TYPE)
	/* Drill down into the current field if it's a struct.  */
	fld = field_at_offset (fldtype, start_after, off - fldpos,
			       fldoff, nextoff);

      last_fld = fld;

      /* Unless the offset is just past the end of the field return it.
	 Otherwise save it and return it only if the offset of the
	 next field is greater (i.e., there is padding between the two)
	 or if there is no next field.  */
      if (off < fldend)
	break;
    }

  if (*nextoff == HOST_WIDE_INT_MAX && next_fld)
    *nextoff = next_pos;

  return last_fld;
}
/* Determine the offset *ELTOFF of the first byte of the array element
   of array ARTYPE into which the byte offset OFF points.  On success
   set *ELTOFF to the offset of the first byte and return type.
   If nonnull, also set *SUBAR_SIZE to the size in bytes of the
   innermost subarray of ARTYPE.
   Otherwise, if no such element can be found, return null.  */

tree
array_elt_at_offset (tree artype, HOST_WIDE_INT off,
		     HOST_WIDE_INT *eltoff /* = nullptr */,
		     HOST_WIDE_INT *subar_size /* = nullptr */)
{
  gcc_assert (TREE_CODE (artype) == ARRAY_TYPE);

  /* Provide a dummy backing store so the out-parameters can be
     written unconditionally.  */
  HOST_WIDE_INT dummy;
  if (!eltoff)
    eltoff = &dummy;
  if (!subar_size)
    subar_size = &dummy;

  /* Peel nested array types down to the innermost array type.  */
  tree eltype = artype;
  while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE)
    eltype = TREE_TYPE (eltype);

  /* The innermost subarray; its size is reported via *SUBAR_SIZE.  */
  tree subartype = eltype;
  /* Step down to the element type itself unless it's a char-mode
     scalar (in which case the subarray is treated as the element).  */
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype))
      || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node))
    eltype = TREE_TYPE (eltype);

  *subar_size = int_size_in_bytes (subartype);

  if (eltype == artype)
    {
      /* ARTYPE is a one-dimensional array of char-mode elements;
	 OFF points at its first element.  */
      *eltoff = 0;
      return artype;
    }

  HOST_WIDE_INT artype_size = int_size_in_bytes (artype);
  HOST_WIDE_INT eltype_size = int_size_in_bytes (eltype);

  if (off < artype_size)
    {
      /* Round OFF down to the start of the containing element.  */
      *eltoff = (off / eltype_size) * eltype_size;
      return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype;
    }

  return NULL_TREE;
}
/* Wrapper around build_array_type_nelts that makes sure the array
   can be created at all and handles zero sized arrays specially.  */

tree
build_printable_array_type (tree eltype, unsigned HOST_WIDE_INT nelts)
{
  /* If the element size is constant, nonzero, and not a multiple of
     the element alignment (its low log2(align) bits are nonzero), fall
     back on the type's main variant — presumably to strip a variant
     (e.g. an over-aligned one) for which no valid array type can be
     formed; TODO(review): confirm the intent.  */
  if (TYPE_SIZE_UNIT (eltype)
      && TREE_CODE (TYPE_SIZE_UNIT (eltype)) == INTEGER_CST
      && !integer_zerop (TYPE_SIZE_UNIT (eltype))
      && TYPE_ALIGN_UNIT (eltype) > 1
      && wi::zext (wi::to_wide (TYPE_SIZE_UNIT (eltype)),
		   ffs_hwi (TYPE_ALIGN_UNIT (eltype)) - 1) != 0)
    eltype = TYPE_MAIN_VARIANT (eltype);

  /* Consider excessive NELTS an array of unknown bound.  */
  tree idxtype = NULL_TREE;
  if (nelts < HOST_WIDE_INT_MAX)
    {
      if (nelts)
	return build_array_type_nelts (eltype, nelts);
      /* NELTS is zero: build an index type for an unknown bound and
	 force a zero size below.  */
      idxtype = build_range_type (sizetype, size_zero_node, NULL_TREE);
    }

  /* Build a distinct zero-sized array type for the zero/excessive
     NELTS cases.  */
  tree arrtype = build_array_type (eltype, idxtype);
  arrtype = build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype));
  TYPE_SIZE (arrtype) = bitsize_zero_node;
  TYPE_SIZE_UNIT (arrtype) = size_zero_node;
  return arrtype;
}