Don't warn when alignment of global common data exceeds maximum alignment.
[official-gcc.git] / gcc / pointer-query.cc
blob99caf78bfa7c715f925114ada9efb646451b2e9f
1 /* Definitions of the pointer_query and related classes.
3 Copyright (C) 2020-2021 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it under
8 the terms of the GNU General Public License as published by the Free
9 Software Foundation; either version 3, or (at your option) any later
10 version.
12 GCC is distributed in the hope that it will be useful, but WITHOUT ANY
13 WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
15 for more details.
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 #include "config.h"
22 #include "system.h"
23 #include "coretypes.h"
24 #include "backend.h"
25 #include "tree.h"
26 #include "gimple.h"
27 #include "stringpool.h"
28 #include "tree-vrp.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "tree-ssa-strlen.h"
33 #include "langhooks.h"
34 #include "stringpool.h"
35 #include "attribs.h"
36 #include "gimple-fold.h"
37 #include "intl.h"
38 #include "attr-fnspec.h"
39 #include "gimple-range.h"
40 #include "pointer-query.h"
42 static bool compute_objsize_r (tree, int, access_ref *, ssa_name_limit_t &,
43 pointer_query *);
45 /* Wrapper around the wide_int overload of get_range that accepts
46 offset_int instead. For middle end expressions returns the same
47 result. For a subset of nonconstant expressions emitted by the front
48 end determines a more precise range than would be possible otherwise. */
50 static bool
51 get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
53 offset_int add = 0;
54 if (TREE_CODE (x) == PLUS_EXPR)
56 /* Handle constant offsets in pointer addition expressions seen
57 in the front end IL. */
58 tree op = TREE_OPERAND (x, 1);
59 if (TREE_CODE (op) == INTEGER_CST)
/* Convert the constant to its signed equivalent first so that
   a large unsigned value is treated as a negative offset.  */
61 op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
62 add = wi::to_offset (op);
63 x = TREE_OPERAND (x, 0);
67 if (TREE_CODE (x) == NOP_EXPR)
68 /* Also handle conversions to sizetype seen in the front end IL. */
69 x = TREE_OPERAND (x, 0);
71 tree type = TREE_TYPE (x);
72 if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
73 return false;
75 if (TREE_CODE (x) != INTEGER_CST
76 && TREE_CODE (x) != SSA_NAME)
/* X is neither a constant nor an SSA name, so no range query is
   possible; fall back on the full range of its type shifted by
   the constant ADD collected above.  */
78 if (TYPE_UNSIGNED (type)
79 && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
80 type = signed_type_for (type);
82 r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
83 r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
/* NOTE(review): this function returns bool yet returns the tree X
   here, relying on the implicit pointer-to-bool conversion (true for
   any nonnull X) -- confirm returning "true" wasn't intended.  */
84 return x;
87 wide_int wr[2];
88 if (!get_range (x, stmt, wr, rvals))
89 return false;
91 signop sgn = SIGNED;
92 /* Only convert signed integers or unsigned sizetype to a signed
93 offset and avoid converting large positive values in narrower
94 types to negative offsets. */
95 if (TYPE_UNSIGNED (type)
96 && wr[0].get_precision () < TYPE_PRECISION (sizetype))
97 sgn = UNSIGNED;
99 r[0] = offset_int::from (wr[0], sgn);
100 r[1] = offset_int::from (wr[1], sgn);
101 return true;
104 /* Return the argument that the call STMT to a built-in function returns
105 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
106 from the argument reflected in the value returned by the built-in if it
107 can be determined, otherwise to 0 and HWI_M1U respectively. Set
108 *PAST_END for functions like mempcpy that might return a past the end
109 pointer (most functions return a dereferenceable pointer to an existing
110 element of an array). */
112 static tree
113 gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
114 range_query *rvals)
116 /* Clear and set below for the rare function(s) that might return
117 a past-the-end pointer. */
118 *past_end = false;
121 /* Check for attribute fn spec to see if the function returns one
122 of its arguments. */
123 attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
124 unsigned int argno;
125 if (fnspec.returns_arg (&argno))
127 /* Functions return the first argument (not a range). */
128 offrng[0] = offrng[1] = 0;
129 return gimple_call_arg (stmt, argno);
133 if (gimple_call_num_args (stmt) < 1)
134 return NULL_TREE;
136 tree fn = gimple_call_fndecl (stmt);
137 if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
139 /* See if this is a call to placement new. */
140 if (!fn
141 || !DECL_IS_OPERATOR_NEW_P (fn)
142 || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
143 return NULL_TREE;
145 /* Check the mangling, keeping in mind that operator new takes
146 a size_t which could be unsigned int or unsigned long. */
/* Itanium ABI names for placement operator new: "j" encodes
   unsigned int and "m" unsigned long for the size_t parameter;
   "Pv" is the trailing void* placement argument.  */
147 tree fname = DECL_ASSEMBLER_NAME (fn);
148 if (!id_equal (fname, "_ZnwjPv") // ordinary form
149 && !id_equal (fname, "_ZnwmPv") // ordinary form
150 && !id_equal (fname, "_ZnajPv") // array form
151 && !id_equal (fname, "_ZnamPv")) // array form
152 return NULL_TREE;
154 if (gimple_call_num_args (stmt) != 2)
155 return NULL_TREE;
157 /* Allocation functions return a pointer to the beginning. */
158 offrng[0] = offrng[1] = 0;
/* Placement new returns its second (placement) argument.  */
159 return gimple_call_arg (stmt, 1);
162 switch (DECL_FUNCTION_CODE (fn))
164 case BUILT_IN_MEMCPY:
165 case BUILT_IN_MEMCPY_CHK:
166 case BUILT_IN_MEMMOVE:
167 case BUILT_IN_MEMMOVE_CHK:
168 case BUILT_IN_MEMSET:
169 case BUILT_IN_STRCAT:
170 case BUILT_IN_STRCAT_CHK:
171 case BUILT_IN_STRCPY:
172 case BUILT_IN_STRCPY_CHK:
173 case BUILT_IN_STRNCAT:
174 case BUILT_IN_STRNCAT_CHK:
175 case BUILT_IN_STRNCPY:
176 case BUILT_IN_STRNCPY_CHK:
177 /* Functions return the first argument (not a range). */
178 offrng[0] = offrng[1] = 0;
179 return gimple_call_arg (stmt, 0);
181 case BUILT_IN_MEMPCPY:
182 case BUILT_IN_MEMPCPY_CHK:
184 /* The returned pointer is in a range constrained by the smaller
185 of the upper bound of the size argument and the source object
186 size. */
187 offrng[0] = 0;
188 offrng[1] = HOST_WIDE_INT_M1U;
189 tree off = gimple_call_arg (stmt, 2);
190 bool off_valid = get_offset_range (off, stmt, offrng, rvals);
191 if (!off_valid || offrng[0] != offrng[1])
193 /* If the offset is either indeterminate or in some range,
194 try to constrain its upper bound to at most the size
195 of the source object. */
196 access_ref aref;
197 tree src = gimple_call_arg (stmt, 1);
198 if (compute_objsize (src, 1, &aref, rvals)
199 && aref.sizrng[1] < offrng[1])
200 offrng[1] = aref.sizrng[1];
203 /* Mempcpy may return a past-the-end pointer. */
204 *past_end = true;
205 return gimple_call_arg (stmt, 0);
208 case BUILT_IN_MEMCHR:
210 tree off = gimple_call_arg (stmt, 2);
211 if (get_offset_range (off, stmt, offrng, rvals))
/* memchr returns a pointer to a matching byte within the first
   N bytes, i.e. strictly before the past-the-end position.  */
212 offrng[1] -= 1;
213 else
214 offrng[1] = HOST_WIDE_INT_M1U;
216 offrng[0] = 0;
217 return gimple_call_arg (stmt, 0);
220 case BUILT_IN_STRCHR:
221 case BUILT_IN_STRRCHR:
222 case BUILT_IN_STRSTR:
/* Without knowing the string length the offset of the result
   within the first argument is unbounded.  */
223 offrng[0] = 0;
224 offrng[1] = HOST_WIDE_INT_M1U;
225 return gimple_call_arg (stmt, 0);
227 case BUILT_IN_STPCPY:
228 case BUILT_IN_STPCPY_CHK:
230 access_ref aref;
231 tree src = gimple_call_arg (stmt, 1);
232 if (compute_objsize (src, 1, &aref, rvals))
/* stpcpy returns a pointer to the copied NUL, at most at
   source-size - 1 bytes from the destination.  */
233 offrng[1] = aref.sizrng[1] - 1;
234 else
235 offrng[1] = HOST_WIDE_INT_M1U;
237 offrng[0] = 0;
238 return gimple_call_arg (stmt, 0);
241 case BUILT_IN_STPNCPY:
242 case BUILT_IN_STPNCPY_CHK:
244 /* The returned pointer is in a range between the first argument
245 and it plus the smaller of the upper bound of the size argument
246 and the source object size. */
247 offrng[1] = HOST_WIDE_INT_M1U;
248 tree off = gimple_call_arg (stmt, 2);
249 if (!get_offset_range (off, stmt, offrng, rvals)
250 || offrng[0] != offrng[1])
252 /* If the offset is either indeterminate or in some range,
253 try to constrain its upper bound to at most the size
254 of the source object. */
255 access_ref aref;
256 tree src = gimple_call_arg (stmt, 1);
257 if (compute_objsize (src, 1, &aref, rvals)
258 && aref.sizrng[1] < offrng[1])
259 offrng[1] = aref.sizrng[1];
262 /* When the source is the empty string the returned pointer is
263 a copy of the argument. Otherwise stpcpy can also return
264 a past-the-end pointer. */
265 offrng[0] = 0;
266 *past_end = true;
267 return gimple_call_arg (stmt, 0);
270 default:
271 break;
274 return NULL_TREE;
277 /* Return true when EXP's range can be determined and set RANGE[] to it
278 after adjusting it if necessary to make EXP represent a valid size
279 of object, or a valid size argument to an allocation function declared
280 with attribute alloc_size (whose argument may be signed), or to a string
281 manipulation function like memset.
282 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
283 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
284 a (nearly) invalid argument to allocation functions like malloc but it
285 is a valid argument to functions like memset.
286 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
287 in a multi-range, otherwise to the smallest valid subrange. */
289 bool
290 get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
291 int flags /* = 0 */)
293 if (!exp)
294 return false;
296 if (tree_fits_uhwi_p (exp))
298 /* EXP is a constant. */
299 range[0] = range[1] = exp;
300 return true;
303 tree exptype = TREE_TYPE (exp);
304 bool integral = INTEGRAL_TYPE_P (exptype);
306 wide_int min, max;
307 enum value_range_kind range_type;
309 if (!query)
310 query = get_global_range_query ();
312 if (integral)
314 value_range vr;
316 query->range_of_expr (vr, exp, stmt);
318 if (vr.undefined_p ())
/* An undefined range (unreachable code) is treated as varying
   so a conservative full-range answer is produced below.  */
319 vr.set_varying (TREE_TYPE (exp));
320 range_type = vr.kind ();
321 min = wi::to_wide (vr.min ());
322 max = wi::to_wide (vr.max ());
324 else
325 range_type = VR_VARYING;
327 if (range_type == VR_VARYING)
329 if (integral)
331 /* Use the full range of the type of the expression when
332 no value range information is available. */
333 range[0] = TYPE_MIN_VALUE (exptype);
334 range[1] = TYPE_MAX_VALUE (exptype);
335 return true;
338 range[0] = NULL_TREE;
339 range[1] = NULL_TREE;
340 return false;
343 unsigned expprec = TYPE_PRECISION (exptype);
345 bool signed_p = !TYPE_UNSIGNED (exptype);
347 if (range_type == VR_ANTI_RANGE)
349 if (signed_p)
351 if (wi::les_p (max, 0))
353 /* EXP is not in a strictly negative range. That means
354 it must be in some (not necessarily strictly) positive
355 range which includes zero. Since in signed to unsigned
356 conversions negative values end up converted to large
357 positive values, and otherwise they are not valid sizes,
358 the resulting range is in both cases [0, TYPE_MAX]. */
359 min = wi::zero (expprec);
360 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
362 else if (wi::les_p (min - 1, 0))
364 /* EXP is not in a negative-positive range. That means EXP
365 is either negative, or greater than max. Since negative
366 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
367 min = max + 1;
368 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
370 else
372 max = min - 1;
373 min = wi::zero (expprec);
376 else
378 wide_int maxsize = wi::to_wide (max_object_size ());
379 min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
380 max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
381 if (wi::eq_p (0, min - 1))
383 /* EXP is unsigned and not in the range [1, MAX]. That means
384 it's either zero or greater than MAX. Even though 0 would
385 normally be detected by -Walloc-zero, unless ALLOW_ZERO
386 is set, set the range to [MAX, TYPE_MAX] so that when MAX
387 is greater than the limit the whole range is diagnosed. */
388 wide_int maxsize = wi::to_wide (max_object_size ());
389 if (flags & SR_ALLOW_ZERO)
391 if (wi::leu_p (maxsize, max + 1)
392 || !(flags & SR_USE_LARGEST))
393 min = max = wi::zero (expprec);
394 else
396 min = max + 1;
397 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
400 else
402 min = max + 1;
403 max = wi::to_wide (TYPE_MAX_VALUE (exptype));
406 else if ((flags & SR_USE_LARGEST)
407 && wi::ltu_p (max + 1, maxsize))
409 /* When USE_LARGEST is set and the larger of the two subranges
410 is a valid size, use it... */
411 min = max + 1;
412 max = maxsize;
414 else
416 /* ...otherwise use the smaller subrange. */
417 max = min - 1;
418 min = wi::zero (expprec);
423 range[0] = wide_int_to_tree (exptype, min);
424 range[1] = wide_int_to_tree (exptype, max);
426 return true;
429 bool
430 get_size_range (tree exp, tree range[2], int flags /* = 0 */)
432 return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
435 /* If STMT is a call to an allocation function, returns the constant
436 maximum size of the object allocated by the call represented as
437 sizetype. If nonnull, sets RNG1[] to the range of the size.
438 When nonnull, uses RVALS for range information, otherwise gets global
439 range info.
440 Returns null when STMT is not a call to a valid allocation function. */
442 tree
443 gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
444 range_query * /* = NULL */)
446 if (!stmt || !is_gimple_call (stmt))
447 return NULL_TREE;
449 tree allocfntype;
450 if (tree fndecl = gimple_call_fndecl (stmt))
451 allocfntype = TREE_TYPE (fndecl);
452 else
453 allocfntype = gimple_call_fntype (stmt);
455 if (!allocfntype)
456 return NULL_TREE;
458 unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
459 tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
460 if (!at)
462 if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
463 return NULL_TREE;
/* __builtin_alloca_with_align has no alloc_size attribute but
   its first argument is the allocation size.  */
465 argidx1 = 0;
468 unsigned nargs = gimple_call_num_args (stmt);
470 if (argidx1 == UINT_MAX)
472 tree atval = TREE_VALUE (at);
473 if (!atval)
474 return NULL_TREE;
/* Attribute alloc_size arguments are 1-based; convert to
   zero-based argument indices.  */
476 argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
477 if (nargs <= argidx1)
478 return NULL_TREE;
480 atval = TREE_CHAIN (atval);
481 if (atval)
483 argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
484 if (nargs <= argidx2)
485 return NULL_TREE;
489 tree size = gimple_call_arg (stmt, argidx1);
491 wide_int rng1_buf[2];
492 /* If RNG1 is not set, use the buffer. */
493 if (!rng1)
494 rng1 = rng1_buf;
496 /* Use maximum precision to avoid overflow below. */
497 const int prec = ADDR_MAX_PRECISION;
500 tree r[2];
501 /* Determine the largest valid range size, including zero. */
502 if (!get_size_range (size, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
503 return NULL_TREE;
504 rng1[0] = wi::to_wide (r[0], prec);
505 rng1[1] = wi::to_wide (r[1], prec);
/* With a single size argument and a constant size there is nothing
   to multiply; return the size converted to sizetype.  */
508 if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
509 return fold_convert (sizetype, size);
511 /* To handle ranges do the math in wide_int and return the product
512 of the upper bounds as a constant. Ignore anti-ranges. */
513 tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
514 wide_int rng2[2];
516 tree r[2];
517 /* As above, use the full non-negative range on failure. */
518 if (!get_size_range (n, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
519 return NULL_TREE;
520 rng2[0] = wi::to_wide (r[0], prec);
521 rng2[1] = wi::to_wide (r[1], prec);
524 /* Compute products of both bounds for the caller but return the lesser
525 of SIZE_MAX and the product of the upper bounds as a constant. */
526 rng1[0] = rng1[0] * rng2[0];
527 rng1[1] = rng1[1] * rng2[1];
529 const tree size_max = TYPE_MAX_VALUE (sizetype);
530 if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
532 rng1[1] = wi::to_wide (size_max, prec);
533 return size_max;
536 return wide_int_to_tree (sizetype, rng1[1]);
539 /* For an access to an object referenced by the function parameter PTR
540 of pointer type, set RNG[] to the range of sizes of the object
541 obtained from the attribute access specification for the current function.
542 Set STATIC_ARRAY if the array parameter has been declared [static].
543 Return the function parameter on success and null otherwise. */
545 tree
546 gimple_parm_array_size (tree ptr, wide_int rng[2],
547 bool *static_array /* = NULL */)
549 /* For a function argument try to determine the byte size of the array
550 from the current function declaration (e.g., attribute access or
551 related). */
552 tree var = SSA_NAME_VAR (ptr);
553 if (TREE_CODE (var) != PARM_DECL)
554 return NULL_TREE;
556 const unsigned prec = TYPE_PRECISION (sizetype);
558 rdwr_map rdwr_idx;
559 attr_access *access = get_parm_access (rdwr_idx, var);
560 if (!access)
561 return NULL_TREE;
563 if (access->sizarg != UINT_MAX)
565 /* TODO: Try to extract the range from the argument based on
566 those of subsequent assertions or based on known calls to
567 the current function. */
568 return NULL_TREE;
571 if (!access->minsize)
572 return NULL_TREE;
574 /* Only consider ordinary array bound at level 2 (or above if it's
575 ever added). */
576 if (warn_array_parameter < 2 && !access->static_p)
577 return NULL_TREE;
579 if (static_array)
580 *static_array = access->static_p;
582 rng[0] = wi::zero (prec);
583 rng[1] = wi::uhwi (access->minsize, prec);
584 /* Multiply the array bound encoded in the attribute by the size
585 of what the pointer argument to which it decays points to. */
586 tree eltype = TREE_TYPE (TREE_TYPE (ptr));
587 tree size = TYPE_SIZE_UNIT (eltype);
588 if (!size || TREE_CODE (size) != INTEGER_CST)
589 return NULL_TREE;
591 rng[1] *= wi::to_wide (size, prec);
592 return var;
/* Construct an access_ref, optionally bounded by BOUND.  MINACCESS is
   set when the access is required to be at least one byte (e.g., for
   functions that dereference their pointer argument).  */
595 access_ref::access_ref (tree bound /* = NULL_TREE */,
596 bool minaccess /* = false */)
597 : ref (), eval ([](tree x){ return x; }), deref (), trail1special (true),
598 base0 (true), parmarray ()
600 /* Set to valid. */
601 offrng[0] = offrng[1] = 0;
602 offmax[0] = offmax[1] = 0;
603 /* Invalidate. */
604 sizrng[0] = sizrng[1] = -1;
606 /* Set the default bounds of the access and adjust below. */
607 bndrng[0] = minaccess ? 1 : 0;
608 bndrng[1] = HOST_WIDE_INT_M1U;
610 /* When BOUND is nonnull and a range can be extracted from it,
611 set the bounds of the access to reflect both it and MINACCESS.
612 BNDRNG[0] is the size of the minimum access. */
613 tree rng[2];
614 if (bound && get_size_range (bound, rng, SR_ALLOW_ZERO))
616 bndrng[0] = wi::to_offset (rng[0]);
617 bndrng[1] = wi::to_offset (rng[1]);
/* A minimum access of one byte applies only when the bound's
   lower end is nonzero; otherwise the access may be empty.  */
618 bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
622 /* Return the PHI node REF refers to or null if it doesn't. */
624 gphi *
625 access_ref::phi () const
627 if (!ref || TREE_CODE (ref) != SSA_NAME)
628 return NULL;
630 gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
631 if (gimple_code (def_stmt) != GIMPLE_PHI)
632 return NULL;
634 return as_a <gphi *> (def_stmt);
637 /* Determine and return the largest object to which *THIS refers. If *THIS
638 refers to a PHI and PREF is nonnull, fill *PREF with the details
639 of the object determined by compute_objsize(ARG, OSTYPE) for each
640 PHI argument ARG. */
642 tree
643 access_ref::get_ref (vec<access_ref> *all_refs,
644 access_ref *pref /* = NULL */,
645 int ostype /* = 1 */,
646 ssa_name_limit_t *psnlim /* = NULL */,
647 pointer_query *qry /* = NULL */) const
649 gphi *phi_stmt = this->phi ();
650 if (!phi_stmt)
651 return ref;
653 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
654 cause unbounded recursion. */
655 ssa_name_limit_t snlim_buf;
656 if (!psnlim)
657 psnlim = &snlim_buf;
659 if (!psnlim->visit_phi (ref))
660 return NULL_TREE;
662 /* Reflects the range of offsets of all PHI arguments refer to the same
663 object (i.e., have the same REF). */
664 access_ref same_ref;
665 /* The conservative result of the PHI reflecting the offset and size
666 of the largest PHI argument, regardless of whether or not they all
667 refer to the same object. */
668 pointer_query empty_qry;
669 if (!qry)
670 qry = &empty_qry;
672 access_ref phi_ref;
673 if (pref)
675 phi_ref = *pref;
676 same_ref = *pref;
679 /* Set if any argument is a function array (or VLA) parameter not
680 declared [static]. */
681 bool parmarray = false;
682 /* The size of the smallest object referenced by the PHI arguments. */
683 offset_int minsize = 0;
684 const offset_int maxobjsize = wi::to_offset (max_object_size ());
685 /* The offset of the PHI, not reflecting those of its arguments. */
686 const offset_int orng[2] = { phi_ref.offrng[0], phi_ref.offrng[1] };
688 const unsigned nargs = gimple_phi_num_args (phi_stmt);
689 for (unsigned i = 0; i < nargs; ++i)
691 access_ref phi_arg_ref;
692 tree arg = gimple_phi_arg_def (phi_stmt, i);
693 if (!compute_objsize_r (arg, ostype, &phi_arg_ref, *psnlim, qry)
694 || phi_arg_ref.sizrng[0] < 0)
695 /* A PHI with all null pointer arguments. */
696 return NULL_TREE;
698 /* Add PREF's offset to that of the argument. */
699 phi_arg_ref.add_offset (orng[0], orng[1]);
700 if (TREE_CODE (arg) == SSA_NAME)
/* Cache the result for the argument to avoid recomputation.  */
701 qry->put_ref (arg, phi_arg_ref);
703 if (all_refs)
704 all_refs->safe_push (phi_arg_ref);
706 const bool arg_known_size = (phi_arg_ref.sizrng[0] != 0
707 || phi_arg_ref.sizrng[1] != maxobjsize);
709 parmarray |= phi_arg_ref.parmarray;
/* A null argument is disregarded only in PHIs with at least two
   arguments (I nonzero or more arguments follow).  */
711 const bool nullp = integer_zerop (arg) && (i || i + 1 < nargs);
713 if (phi_ref.sizrng[0] < 0)
/* PHI_REF is still invalid: this is the first argument that
   yielded a result; use it as the initial candidate.  */
715 if (!nullp)
716 same_ref = phi_arg_ref;
717 phi_ref = phi_arg_ref;
718 if (arg_known_size)
719 minsize = phi_arg_ref.sizrng[0];
720 continue;
723 const bool phi_known_size = (phi_ref.sizrng[0] != 0
724 || phi_ref.sizrng[1] != maxobjsize);
726 if (phi_known_size && phi_arg_ref.sizrng[0] < minsize)
727 minsize = phi_arg_ref.sizrng[0];
729 /* Disregard null pointers in PHIs with two or more arguments.
730 TODO: Handle this better! */
731 if (nullp)
732 continue;
734 /* Determine the amount of remaining space in the argument. */
735 offset_int argrem[2];
736 argrem[1] = phi_arg_ref.size_remaining (argrem);
738 /* Determine the amount of remaining space computed so far and
739 if the remaining space in the argument is more use it instead. */
740 offset_int phirem[2];
741 phirem[1] = phi_ref.size_remaining (phirem);
743 if (phi_arg_ref.ref != same_ref.ref)
744 same_ref.ref = NULL_TREE;
746 if (phirem[1] < argrem[1]
747 || (phirem[1] == argrem[1]
748 && phi_ref.sizrng[1] < phi_arg_ref.sizrng[1]))
749 /* Use the argument with the most space remaining as the result,
750 or the larger one if the space is equal. */
751 phi_ref = phi_arg_ref;
753 /* Set SAME_REF.OFFRNG to the maximum range of all arguments. */
754 if (phi_arg_ref.offrng[0] < same_ref.offrng[0])
755 same_ref.offrng[0] = phi_arg_ref.offrng[0];
756 if (same_ref.offrng[1] < phi_arg_ref.offrng[1])
757 same_ref.offrng[1] = phi_arg_ref.offrng[1];
760 if (!same_ref.ref && same_ref.offrng[0] != 0)
761 /* Clear BASE0 if not all the arguments refer to the same object and
762 if not all their offsets are zero-based. This allows the final
763 PHI offset to be out of bounds for some arguments but not for others
764 (or negative even if all the arguments are BASE0), which is overly
765 permissive. */
766 phi_ref.base0 = false;
768 if (same_ref.ref)
769 phi_ref = same_ref;
770 else
772 /* Replace the lower bound of the largest argument with the size
773 of the smallest argument, and set PARMARRAY if any argument
774 was one. */
775 phi_ref.sizrng[0] = minsize;
776 phi_ref.parmarray = parmarray;
779 if (phi_ref.sizrng[0] < 0)
781 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
782 (perhaps because they have all been already visited by prior
783 recursive calls). */
784 psnlim->leave_phi (ref);
785 return NULL_TREE;
788 /* Avoid changing *THIS. */
789 if (pref && pref != this)
790 *pref = phi_ref;
792 psnlim->leave_phi (ref);
794 return phi_ref.ref;
797 /* Return the maximum amount of space remaining and if non-null, set
798 argument to the minimum. */
800 offset_int
801 access_ref::size_remaining (offset_int *pmin /* = NULL */) const
803 offset_int minbuf;
804 if (!pmin)
805 pmin = &minbuf;
807 /* add_offset() ensures the offset range isn't inverted. */
808 gcc_checking_assert (offrng[0] <= offrng[1]);
810 if (base0)
812 /* The offset into referenced object is zero-based (i.e., it's
813 not referenced by a pointer into middle of some unknown object). */
814 if (offrng[0] < 0 && offrng[1] < 0)
816 /* If the offset is negative the remaining size is zero. */
817 *pmin = 0;
818 return 0;
821 if (sizrng[1] <= offrng[0])
823 /* If the starting offset is greater than or equal to the upper
824 bound on the size of the object, the space remaining is zero.
825 As a special case, if it's equal, set *PMIN to -1 to let
826 the caller know the offset is valid and just past the end. */
827 *pmin = sizrng[1] == offrng[0] ? -1 : 0;
828 return 0;
831 /* Otherwise return the size minus the lower bound of the offset. */
832 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
834 *pmin = sizrng[0] - or0;
835 return sizrng[1] - or0;
838 /* The offset to the referenced object isn't zero-based (i.e., it may
839 refer to a byte other than the first. The size of such an object
840 is constrained only by the size of the address space (the result
841 of max_object_size()). */
842 if (sizrng[1] <= offrng[0])
844 *pmin = 0;
845 return 0;
848 offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];
850 *pmin = sizrng[0] - or0;
851 return sizrng[1] - or0;
854 /* Return true if the offset and object size are in range for SIZE. */
856 bool
857 access_ref::offset_in_range (const offset_int &size) const
859 if (size_remaining () < size)
860 return false;
862 if (base0)
863 return offmax[0] >= 0 && offmax[1] <= sizrng[1];
865 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
866 return offmax[0] > -maxoff && offmax[1] < maxoff;
869 /* Add the range [MIN, MAX] to the offset range. For known objects (with
870 zero-based offsets) at least one of whose offset's bounds is in range,
871 constrain the other (or both) to the bounds of the object (i.e., zero
872 and the upper bound of its size). This improves the quality of
873 diagnostics. */
875 void access_ref::add_offset (const offset_int &min, const offset_int &max)
877 if (min <= max)
879 /* To add an ordinary range just add it to the bounds. */
880 offrng[0] += min;
881 offrng[1] += max;
883 else if (!base0)
885 /* To add an inverted range to an offset to an unknown object
886 expand it to the maximum. */
887 add_max_offset ();
888 return;
890 else
892 /* To add an inverted range to an offset to a known object set
893 the upper bound to the maximum representable offset value
894 (which may be greater than MAX_OBJECT_SIZE).
895 The lower bound is either the sum of the current offset and
896 MIN when abs(MAX) is greater than the former, or zero otherwise.
897 Zero because then the inverted range includes the negative of
898 the lower bound. */
899 offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
900 offrng[1] = maxoff;
902 if (max >= 0)
904 offrng[0] = 0;
905 if (offmax[0] > 0)
906 offmax[0] = 0;
907 return;
910 offset_int absmax = wi::abs (max);
911 if (offrng[0] < absmax)
913 offrng[0] += min;
914 /* Cap the lower bound at the upper (set to MAXOFF above)
915 to avoid inadvertently recreating an inverted range. */
916 if (offrng[1] < offrng[0])
917 offrng[0] = offrng[1];
919 else
920 offrng[0] = 0;
923 /* Set the minimum and maximum computed so far. */
924 if (offrng[1] < 0 && offrng[1] < offmax[0])
925 offmax[0] = offrng[1];
926 if (offrng[0] > 0 && offrng[0] > offmax[1])
927 offmax[1] = offrng[0];
929 if (!base0)
930 return;
932 /* When referencing a known object check to see if the offset computed
933 so far is in bounds... */
934 offset_int remrng[2];
935 remrng[1] = size_remaining (remrng);
936 if (remrng[1] > 0 || remrng[0] < 0)
938 /* ...if so, constrain it so that neither bound exceeds the size of
939 the object. Out of bounds offsets are left unchanged, and, for
940 better or worse, become in bounds later. They should be detected
941 and diagnosed at the point they first become invalid by
942 -Warray-bounds. */
943 if (offrng[0] < 0)
944 offrng[0] = 0;
945 if (offrng[1] > sizrng[1])
946 offrng[1] = sizrng[1];
950 /* Issue one inform message describing each target of an access REF.
951 WRITE is set for a write access and clear for a read access. */
953 void
954 access_ref::inform_access (access_mode mode) const
956 const access_ref &aref = *this;
957 if (!aref.ref)
958 return;
960 if (aref.phi ())
962 /* Set MAXREF to refer to the largest object and fill ALL_REFS
963 with data for all objects referenced by the PHI arguments. */
964 access_ref maxref;
965 auto_vec<access_ref> all_refs;
966 if (!get_ref (&all_refs, &maxref))
967 return;
969 /* Except for MAXREF, the rest of the arguments' offsets need not
970 reflect one added to the PHI itself. Determine the latter from
971 MAXREF on which the result is based. */
972 const offset_int orng[] =
974 offrng[0] - maxref.offrng[0],
975 wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
978 /* Add the final PHI's offset to that of each of the arguments
979 and recurse to issue an inform message for it. */
980 for (unsigned i = 0; i != all_refs.length (); ++i)
982 /* Skip any PHIs; those could lead to infinite recursion. */
983 if (all_refs[i].phi ())
984 continue;
986 all_refs[i].add_offset (orng[0], orng[1]);
987 all_refs[i].inform_access (mode);
989 return;
992 /* Convert offset range and avoid including a zero range since it
993 isn't necessarily meaningful. */
994 HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
995 HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
996 HOST_WIDE_INT minoff;
997 HOST_WIDE_INT maxoff = diff_max;
998 if (wi::fits_shwi_p (aref.offrng[0]))
999 minoff = aref.offrng[0].to_shwi ();
1000 else
1001 minoff = aref.offrng[0] < 0 ? diff_min : diff_max;
1003 if (wi::fits_shwi_p (aref.offrng[1]))
1004 maxoff = aref.offrng[1].to_shwi ();
1006 if (maxoff <= diff_min || maxoff >= diff_max)
1007 /* Avoid mentioning an upper bound that's equal to or in excess
1008 of the maximum of ptrdiff_t. */
1009 maxoff = minoff;
1011 /* Convert size range and always include it since all sizes are
1012 meaningful. */
1013 unsigned long long minsize = 0, maxsize = 0;
1014 if (wi::fits_shwi_p (aref.sizrng[0])
1015 && wi::fits_shwi_p (aref.sizrng[1]))
1017 minsize = aref.sizrng[0].to_shwi ();
1018 maxsize = aref.sizrng[1].to_shwi ();
1021 /* SIZRNG doesn't necessarily have the same range as the allocation
1022 size determined by gimple_call_alloc_size (). */
1023 char sizestr[80];
1024 if (minsize == maxsize)
1025 sprintf (sizestr, "%llu", minsize);
1026 else
1027 sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);
1029 char offstr[80];
1030 if (minoff == 0
1031 && (maxoff == 0 || aref.sizrng[1] <= maxoff))
/* An empty OFFSTR suppresses the "at offset" phrasing below.  */
1032 offstr[0] = '\0';
1033 else if (minoff == maxoff)
1034 sprintf (offstr, "%lli", (long long) minoff);
1035 else
1036 sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);
1038 location_t loc = UNKNOWN_LOCATION;
1040 tree ref = this->ref;
1041 tree allocfn = NULL_TREE;
1042 if (TREE_CODE (ref) == SSA_NAME)
1044 gimple *stmt = SSA_NAME_DEF_STMT (ref);
1045 if (is_gimple_call (stmt))
1047 loc = gimple_location (stmt);
1048 if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
1050 /* Strip the SSA_NAME suffix from the variable name and
1051 recreate an identifier with the VLA's original name. */
1052 ref = gimple_call_lhs (stmt);
1053 if (SSA_NAME_IDENTIFIER (ref))
1055 ref = SSA_NAME_IDENTIFIER (ref);
1056 const char *id = IDENTIFIER_POINTER (ref);
1057 size_t len = strcspn (id, ".$");
1058 if (!len)
1059 len = strlen (id);
1060 ref = get_identifier_with_length (id, len);
1063 else
1065 /* Except for VLAs, retrieve the allocation function. */
1066 allocfn = gimple_call_fndecl (stmt);
1067 if (!allocfn)
1068 allocfn = gimple_call_fn (stmt);
1069 if (TREE_CODE (allocfn) == SSA_NAME)
1071 /* For an ALLOC_CALL via a function pointer make a small
1072 effort to determine the destination of the pointer. */
1073 gimple *def = SSA_NAME_DEF_STMT (allocfn);
1074 if (gimple_assign_single_p (def))
1076 tree rhs = gimple_assign_rhs1 (def);
1077 if (DECL_P (rhs))
1078 allocfn = rhs;
1079 else if (TREE_CODE (rhs) == COMPONENT_REF)
1080 allocfn = TREE_OPERAND (rhs, 1);
1085 else if (gimple_nop_p (stmt))
1086 /* Handle DECL_PARM below. */
1087 ref = SSA_NAME_VAR (ref);
1090 if (DECL_P (ref))
1091 loc = DECL_SOURCE_LOCATION (ref);
1092 else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
1093 loc = EXPR_LOCATION (ref);
1094 else if (TREE_CODE (ref) != IDENTIFIER_NODE
1095 && TREE_CODE (ref) != SSA_NAME)
1096 return;
1098 if (mode == access_read_write || mode == access_write_only)
1100 if (allocfn == NULL_TREE)
1102 if (*offstr)
1103 inform (loc, "at offset %s into destination object %qE of size %s",
1104 offstr, ref, sizestr);
1105 else
1106 inform (loc, "destination object %qE of size %s", ref, sizestr);
1107 return;
1110 if (*offstr)
1111 inform (loc,
1112 "at offset %s into destination object of size %s "
1113 "allocated by %qE", offstr, sizestr, allocfn);
1114 else
1115 inform (loc, "destination object of size %s allocated by %qE",
1116 sizestr, allocfn);
1117 return;
1120 if (mode == access_read_only)
1122 if (allocfn == NULL_TREE)
1124 if (*offstr)
1125 inform (loc, "at offset %s into source object %qE of size %s",
1126 offstr, ref, sizestr);
1127 else
1128 inform (loc, "source object %qE of size %s", ref, sizestr);
1130 return;
1133 if (*offstr)
1134 inform (loc,
1135 "at offset %s into source object of size %s allocated by %qE",
1136 offstr, sizestr, allocfn);
1137 else
1138 inform (loc, "source object of size %s allocated by %qE",
1139 sizestr, allocfn);
1140 return;
1143 if (allocfn == NULL_TREE)
1145 if (*offstr)
1146 inform (loc, "at offset %s into object %qE of size %s",
1147 offstr, ref, sizestr);
1148 else
1149 inform (loc, "object %qE of size %s", ref, sizestr);
1151 return;
1154 if (*offstr)
1155 inform (loc,
1156 "at offset %s into object of size %s allocated by %qE",
1157 offstr, sizestr, allocfn);
1158 else
1159 inform (loc, "object of size %s allocated by %qE",
1160 sizestr, allocfn);
1163 /* Set a bit for the PHI in VISITED and return true if it wasn't
1164 already set. */
1166 bool
1167 ssa_name_limit_t::visit_phi (tree ssa_name)
1169 if (!visited)
1170 visited = BITMAP_ALLOC (NULL);
1172 /* Return false if SSA_NAME has already been visited. */
1173 return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
1176 /* Clear a bit for the PHI in VISITED. */
1178 void
1179 ssa_name_limit_t::leave_phi (tree ssa_name)
1181 /* Return false if SSA_NAME has already been visited. */
1182 bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
1185 /* Return false if the SSA_NAME chain length counter has reached
1186 the limit, otherwise increment the counter and return true. */
1188 bool
1189 ssa_name_limit_t::next ()
1191 /* Return a negative value to let caller avoid recursing beyond
1192 the specified limit. */
1193 if (ssa_def_max == 0)
1194 return false;
1196 --ssa_def_max;
1197 return true;
1200 /* If the SSA_NAME has already been "seen" return a positive value.
1201 Otherwise add it to VISITED. If the SSA_NAME limit has been
1202 reached, return a negative value. Otherwise return zero. */
1205 ssa_name_limit_t::next_phi (tree ssa_name)
1208 gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
1209 /* Return a positive value if the PHI has already been visited. */
1210 if (gimple_code (def_stmt) == GIMPLE_PHI
1211 && !visit_phi (ssa_name))
1212 return 1;
1215 /* Return a negative value to let caller avoid recursing beyond
1216 the specified limit. */
1217 if (ssa_def_max == 0)
1218 return -1;
1220 --ssa_def_max;
1222 return 0;
1225 ssa_name_limit_t::~ssa_name_limit_t ()
1227 if (visited)
1228 BITMAP_FREE (visited);
1231 /* Default ctor. Initialize object with pointers to the range_query
1232 and cache_type instances to use or null. */
1234 pointer_query::pointer_query (range_query *qry /* = NULL */,
1235 cache_type *cache /* = NULL */)
1236 : rvals (qry), var_cache (cache), hits (), misses (),
1237 failures (), depth (), max_depth ()
1239 /* No op. */
1242 /* Return a pointer to the cached access_ref instance for the SSA_NAME
1243 PTR if it's there or null otherwise. */
1245 const access_ref *
1246 pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
1248 if (!var_cache)
1250 ++misses;
1251 return NULL;
1254 unsigned version = SSA_NAME_VERSION (ptr);
1255 unsigned idx = version << 1 | (ostype & 1);
1256 if (var_cache->indices.length () <= idx)
1258 ++misses;
1259 return NULL;
1262 unsigned cache_idx = var_cache->indices[idx];
1263 if (var_cache->access_refs.length () <= cache_idx)
1265 ++misses;
1266 return NULL;
1269 access_ref &cache_ref = var_cache->access_refs[cache_idx];
1270 if (cache_ref.ref)
1272 ++hits;
1273 return &cache_ref;
1276 ++misses;
1277 return NULL;
1280 /* Retrieve the access_ref instance for a variable from the cache if it's
1281 there or compute it and insert it into the cache if it's nonnonull. */
1283 bool
1284 pointer_query::get_ref (tree ptr, access_ref *pref, int ostype /* = 1 */)
1286 const unsigned version
1287 = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;
1289 if (var_cache && version)
1291 unsigned idx = version << 1 | (ostype & 1);
1292 if (idx < var_cache->indices.length ())
1294 unsigned cache_idx = var_cache->indices[idx] - 1;
1295 if (cache_idx < var_cache->access_refs.length ()
1296 && var_cache->access_refs[cache_idx].ref)
1298 ++hits;
1299 *pref = var_cache->access_refs[cache_idx];
1300 return true;
1304 ++misses;
1307 if (!compute_objsize (ptr, ostype, pref, this))
1309 ++failures;
1310 return false;
1313 return true;
1316 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1317 nonnull. */
1319 void
1320 pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
1322 /* Only add populated/valid entries. */
1323 if (!var_cache || !ref.ref || ref.sizrng[0] < 0)
1324 return;
1326 /* Add REF to the two-level cache. */
1327 unsigned version = SSA_NAME_VERSION (ptr);
1328 unsigned idx = version << 1 | (ostype & 1);
1330 /* Grow INDICES if necessary. An index is valid if it's nonzero.
1331 Its value minus one is the index into ACCESS_REFS. Not all
1332 entries are valid. */
1333 if (var_cache->indices.length () <= idx)
1334 var_cache->indices.safe_grow_cleared (idx + 1);
1336 if (!var_cache->indices[idx])
1337 var_cache->indices[idx] = var_cache->access_refs.length () + 1;
1339 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
1340 REF member is nonnull. All entries except for the last two
1341 are valid. Once nonnull, the REF value must stay unchanged. */
1342 unsigned cache_idx = var_cache->indices[idx];
1343 if (var_cache->access_refs.length () <= cache_idx)
1344 var_cache->access_refs.safe_grow_cleared (cache_idx + 1);
1346 access_ref cache_ref = var_cache->access_refs[cache_idx - 1];
1347 if (cache_ref.ref)
1349 gcc_checking_assert (cache_ref.ref == ref.ref);
1350 return;
1353 cache_ref = ref;
1356 /* Flush the cache if it's nonnull. */
1358 void
1359 pointer_query::flush_cache ()
1361 if (!var_cache)
1362 return;
1363 var_cache->indices.release ();
1364 var_cache->access_refs.release ();
1367 /* A helper of compute_objsize_r() to determine the size from an assignment
1368 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. */
1370 static bool
1371 handle_min_max_size (gimple *stmt, int ostype, access_ref *pref,
1372 ssa_name_limit_t &snlim, pointer_query *qry)
1374 tree_code code = gimple_assign_rhs_code (stmt);
1376 tree ptr = gimple_assign_rhs1 (stmt);
1378 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
1379 Determine the size/offset of each and use the one with more or less
1380 space remaining, respectively. If either fails, use the information
1381 determined from the other instead, adjusted up or down as appropriate
1382 for the expression. */
1383 access_ref aref[2] = { *pref, *pref };
1384 if (!compute_objsize_r (ptr, ostype, &aref[0], snlim, qry))
1386 aref[0].base0 = false;
1387 aref[0].offrng[0] = aref[0].offrng[1] = 0;
1388 aref[0].add_max_offset ();
1389 aref[0].set_max_size_range ();
1392 ptr = gimple_assign_rhs2 (stmt);
1393 if (!compute_objsize_r (ptr, ostype, &aref[1], snlim, qry))
1395 aref[1].base0 = false;
1396 aref[1].offrng[0] = aref[1].offrng[1] = 0;
1397 aref[1].add_max_offset ();
1398 aref[1].set_max_size_range ();
1401 if (!aref[0].ref && !aref[1].ref)
1402 /* Fail if the identity of neither argument could be determined. */
1403 return false;
1405 bool i0 = false;
1406 if (aref[0].ref && aref[0].base0)
1408 if (aref[1].ref && aref[1].base0)
1410 /* If the object referenced by both arguments has been determined
1411 set *PREF to the one with more or less space remainng, whichever
1412 is appopriate for CODE.
1413 TODO: Indicate when the objects are distinct so it can be
1414 diagnosed. */
1415 i0 = code == MAX_EXPR;
1416 const bool i1 = !i0;
1418 if (aref[i0].size_remaining () < aref[i1].size_remaining ())
1419 *pref = aref[i1];
1420 else
1421 *pref = aref[i0];
1422 return true;
1425 /* If only the object referenced by one of the arguments could be
1426 determined, use it and... */
1427 *pref = aref[0];
1428 i0 = true;
1430 else
1431 *pref = aref[1];
1433 const bool i1 = !i0;
1434 /* ...see if the offset obtained from the other pointer can be used
1435 to tighten up the bound on the offset obtained from the first. */
1436 if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
1437 || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
1439 pref->offrng[0] = aref[i0].offrng[0];
1440 pref->offrng[1] = aref[i0].offrng[1];
1442 return true;
1445 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
1446 AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
1447 on success and false on failure. */
1449 static bool
1450 handle_array_ref (tree aref, bool addr, int ostype, access_ref *pref,
1451 ssa_name_limit_t &snlim, pointer_query *qry)
1453 gcc_assert (TREE_CODE (aref) == ARRAY_REF);
1455 ++pref->deref;
1457 tree arefop = TREE_OPERAND (aref, 0);
1458 tree reftype = TREE_TYPE (arefop);
1459 if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
1460 /* Avoid arrays of pointers. FIXME: Hande pointers to arrays
1461 of known bound. */
1462 return false;
1464 if (!compute_objsize_r (arefop, ostype, pref, snlim, qry))
1465 return false;
1467 offset_int orng[2];
1468 tree off = pref->eval (TREE_OPERAND (aref, 1));
1469 range_query *const rvals = qry ? qry->rvals : NULL;
1470 if (!get_offset_range (off, NULL, orng, rvals))
1472 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1473 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1474 orng[0] = -orng[1] - 1;
1477 /* Convert the array index range determined above to a byte
1478 offset. */
1479 tree lowbnd = array_ref_low_bound (aref);
1480 if (!integer_zerop (lowbnd) && tree_fits_uhwi_p (lowbnd))
1482 /* Adjust the index by the low bound of the array domain
1483 (normally zero but 1 in Fortran). */
1484 unsigned HOST_WIDE_INT lb = tree_to_uhwi (lowbnd);
1485 orng[0] -= lb;
1486 orng[1] -= lb;
1489 tree eltype = TREE_TYPE (aref);
1490 tree tpsize = TYPE_SIZE_UNIT (eltype);
1491 if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
1493 pref->add_max_offset ();
1494 return true;
1497 offset_int sz = wi::to_offset (tpsize);
1498 orng[0] *= sz;
1499 orng[1] *= sz;
1501 if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
1503 /* Except for the permissive raw memory functions which use
1504 the size of the whole object determined above, use the size
1505 of the referenced array. Because the overall offset is from
1506 the beginning of the complete array object add this overall
1507 offset to the size of array. */
1508 offset_int sizrng[2] =
1510 pref->offrng[0] + orng[0] + sz,
1511 pref->offrng[1] + orng[1] + sz
1513 if (sizrng[1] < sizrng[0])
1514 std::swap (sizrng[0], sizrng[1]);
1515 if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
1516 pref->sizrng[0] = sizrng[0];
1517 if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
1518 pref->sizrng[1] = sizrng[1];
1521 pref->add_offset (orng[0], orng[1]);
1522 return true;
1525 /* A helper of compute_objsize_r() to determine the size from MEM_REF
1526 MREF. Return true on success and false on failure. */
1528 static bool
1529 handle_mem_ref (tree mref, int ostype, access_ref *pref,
1530 ssa_name_limit_t &snlim, pointer_query *qry)
1532 gcc_assert (TREE_CODE (mref) == MEM_REF);
1534 ++pref->deref;
1536 if (VECTOR_TYPE_P (TREE_TYPE (mref)))
1538 /* Hack: Handle MEM_REFs of vector types as those to complete
1539 objects; those may be synthesized from multiple assignments
1540 to consecutive data members (see PR 93200 and 96963).
1541 FIXME: Vectorized assignments should only be present after
1542 vectorization so this hack is only necessary after it has
1543 run and could be avoided in calls from prior passes (e.g.,
1544 tree-ssa-strlen.c).
1545 FIXME: Deal with this more generally, e.g., by marking up
1546 such MEM_REFs at the time they're created. */
1547 ostype = 0;
1550 tree mrefop = TREE_OPERAND (mref, 0);
1551 if (!compute_objsize_r (mrefop, ostype, pref, snlim, qry))
1552 return false;
1554 offset_int orng[2];
1555 tree off = pref->eval (TREE_OPERAND (mref, 1));
1556 range_query *const rvals = qry ? qry->rvals : NULL;
1557 if (!get_offset_range (off, NULL, orng, rvals))
1559 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1560 orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1561 orng[0] = -orng[1] - 1;
1564 pref->add_offset (orng[0], orng[1]);
1565 return true;
1568 /* Helper to compute the size of the object referenced by the PTR
1569 expression which must have pointer type, using Object Size type
1570 OSTYPE (only the least significant 2 bits are used).
1571 On success, sets PREF->REF to the DECL of the referenced object
1572 if it's unique, otherwise to null, PREF->OFFRNG to the range of
1573 offsets into it, and PREF->SIZRNG to the range of sizes of
1574 the object(s).
1575 SNLIM is used to avoid visiting the same PHI operand multiple
1576 times, and, when nonnull, RVALS to determine range information.
1577 Returns true on success, false when a meaningful size (or range)
1578 cannot be determined.
1580 The function is intended for diagnostics and should not be used
1581 to influence code generation or optimization. */
1583 static bool
1584 compute_objsize_r (tree ptr, int ostype, access_ref *pref,
1585 ssa_name_limit_t &snlim, pointer_query *qry)
1587 STRIP_NOPS (ptr);
1589 const bool addr = TREE_CODE (ptr) == ADDR_EXPR;
1590 if (addr)
1592 --pref->deref;
1593 ptr = TREE_OPERAND (ptr, 0);
1596 if (DECL_P (ptr))
1598 pref->ref = ptr;
1600 if (!addr && POINTER_TYPE_P (TREE_TYPE (ptr)))
1602 /* Set the maximum size if the reference is to the pointer
1603 itself (as opposed to what it points to), and clear
1604 BASE0 since the offset isn't necessarily zero-based. */
1605 pref->set_max_size_range ();
1606 pref->base0 = false;
1607 return true;
1610 if (tree size = decl_init_size (ptr, false))
1611 if (TREE_CODE (size) == INTEGER_CST)
1613 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1614 return true;
1617 pref->set_max_size_range ();
1618 return true;
1621 const tree_code code = TREE_CODE (ptr);
1622 range_query *const rvals = qry ? qry->rvals : NULL;
1624 if (code == BIT_FIELD_REF)
1626 tree ref = TREE_OPERAND (ptr, 0);
1627 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
1628 return false;
1630 offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
1631 pref->add_offset (off / BITS_PER_UNIT);
1632 return true;
1635 if (code == COMPONENT_REF)
1637 tree ref = TREE_OPERAND (ptr, 0);
1638 if (TREE_CODE (TREE_TYPE (ref)) == UNION_TYPE)
1639 /* In accesses through union types consider the entire unions
1640 rather than just their members. */
1641 ostype = 0;
1642 tree field = TREE_OPERAND (ptr, 1);
1644 if (ostype == 0)
1646 /* In OSTYPE zero (for raw memory functions like memcpy), use
1647 the maximum size instead if the identity of the enclosing
1648 object cannot be determined. */
1649 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
1650 return false;
1652 /* Otherwise, use the size of the enclosing object and add
1653 the offset of the member to the offset computed so far. */
1654 tree offset = byte_position (field);
1655 if (TREE_CODE (offset) == INTEGER_CST)
1656 pref->add_offset (wi::to_offset (offset));
1657 else
1658 pref->add_max_offset ();
1660 if (!pref->ref)
1661 /* REF may have been already set to an SSA_NAME earlier
1662 to provide better context for diagnostics. In that case,
1663 leave it unchanged. */
1664 pref->ref = ref;
1665 return true;
1668 pref->ref = field;
1670 if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
1672 /* Set maximum size if the reference is to the pointer member
1673 itself (as opposed to what it points to). */
1674 pref->set_max_size_range ();
1675 return true;
1678 /* SAM is set for array members that might need special treatment. */
1679 special_array_member sam;
1680 tree size = component_ref_size (ptr, &sam);
1681 if (sam == special_array_member::int_0)
1682 pref->sizrng[0] = pref->sizrng[1] = 0;
1683 else if (!pref->trail1special && sam == special_array_member::trail_1)
1684 pref->sizrng[0] = pref->sizrng[1] = 1;
1685 else if (size && TREE_CODE (size) == INTEGER_CST)
1686 pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
1687 else
1689 /* When the size of the member is unknown it's either a flexible
1690 array member or a trailing special array member (either zero
1691 length or one-element). Set the size to the maximum minus
1692 the constant size of the type. */
1693 pref->sizrng[0] = 0;
1694 pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
1695 if (tree recsize = TYPE_SIZE_UNIT (TREE_TYPE (ref)))
1696 if (TREE_CODE (recsize) == INTEGER_CST)
1697 pref->sizrng[1] -= wi::to_offset (recsize);
1699 return true;
1702 if (code == ARRAY_REF)
1703 return handle_array_ref (ptr, addr, ostype, pref, snlim, qry);
1705 if (code == MEM_REF)
1706 return handle_mem_ref (ptr, ostype, pref, snlim, qry);
1708 if (code == TARGET_MEM_REF)
1710 tree ref = TREE_OPERAND (ptr, 0);
1711 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
1712 return false;
1714 /* TODO: Handle remaining operands. Until then, add maximum offset. */
1715 pref->ref = ptr;
1716 pref->add_max_offset ();
1717 return true;
1720 if (code == INTEGER_CST)
1722 /* Pointer constants other than null are most likely the result
1723 of erroneous null pointer addition/subtraction. Set size to
1724 zero. For null pointers, set size to the maximum for now
1725 since those may be the result of jump threading. */
1726 if (integer_zerop (ptr))
1727 pref->set_max_size_range ();
1728 else
1729 pref->sizrng[0] = pref->sizrng[1] = 0;
1730 pref->ref = ptr;
1732 return true;
1735 if (code == STRING_CST)
1737 pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
1738 pref->ref = ptr;
1739 return true;
1742 if (code == POINTER_PLUS_EXPR)
1744 tree ref = TREE_OPERAND (ptr, 0);
1745 if (!compute_objsize_r (ref, ostype, pref, snlim, qry))
1746 return false;
1748 /* Clear DEREF since the offset is being applied to the target
1749 of the dereference. */
1750 pref->deref = 0;
1752 offset_int orng[2];
1753 tree off = pref->eval (TREE_OPERAND (ptr, 1));
1754 if (get_offset_range (off, NULL, orng, rvals))
1755 pref->add_offset (orng[0], orng[1]);
1756 else
1757 pref->add_max_offset ();
1758 return true;
1761 if (code == VIEW_CONVERT_EXPR)
1763 ptr = TREE_OPERAND (ptr, 0);
1764 return compute_objsize_r (ptr, ostype, pref, snlim, qry);
1767 if (code == SSA_NAME)
1769 if (!snlim.next ())
1770 return false;
1772 /* Only process an SSA_NAME if the recursion limit has not yet
1773 been reached. */
1774 if (qry)
1776 if (++qry->depth)
1777 qry->max_depth = qry->depth;
1778 if (const access_ref *cache_ref = qry->get_ref (ptr))
1780 /* If the pointer is in the cache set *PREF to what it refers
1781 to and return success. */
1782 *pref = *cache_ref;
1783 return true;
1787 gimple *stmt = SSA_NAME_DEF_STMT (ptr);
1788 if (is_gimple_call (stmt))
1790 /* If STMT is a call to an allocation function get the size
1791 from its argument(s). If successful, also set *PREF->REF
1792 to PTR for the caller to include in diagnostics. */
1793 wide_int wr[2];
1794 if (gimple_call_alloc_size (stmt, wr, rvals))
1796 pref->ref = ptr;
1797 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
1798 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
1799 /* Constrain both bounds to a valid size. */
1800 offset_int maxsize = wi::to_offset (max_object_size ());
1801 if (pref->sizrng[0] > maxsize)
1802 pref->sizrng[0] = maxsize;
1803 if (pref->sizrng[1] > maxsize)
1804 pref->sizrng[1] = maxsize;
1806 else
1808 /* For functions known to return one of their pointer arguments
1809 try to determine what the returned pointer points to, and on
1810 success add OFFRNG which was set to the offset added by
1811 the function (e.g., memchr) to the overall offset. */
1812 bool past_end;
1813 offset_int offrng[2];
1814 if (tree ret = gimple_call_return_array (stmt, offrng,
1815 &past_end, rvals))
1817 if (!compute_objsize_r (ret, ostype, pref, snlim, qry))
1818 return false;
1820 /* Cap OFFRNG[1] to at most the remaining size of
1821 the object. */
1822 offset_int remrng[2];
1823 remrng[1] = pref->size_remaining (remrng);
1824 if (remrng[1] != 0 && !past_end)
1825 /* Decrement the size for functions that never return
1826 a past-the-end pointer. */
1827 remrng[1] -= 1;
1829 if (remrng[1] < offrng[1])
1830 offrng[1] = remrng[1];
1831 pref->add_offset (offrng[0], offrng[1]);
1833 else
1835 /* For other calls that might return arbitrary pointers
1836 including into the middle of objects set the size
1837 range to maximum, clear PREF->BASE0, and also set
1838 PREF->REF to include in diagnostics. */
1839 pref->set_max_size_range ();
1840 pref->base0 = false;
1841 pref->ref = ptr;
1844 qry->put_ref (ptr, *pref);
1845 return true;
1848 if (gimple_nop_p (stmt))
1850 /* For a function argument try to determine the byte size
1851 of the array from the current function declaratation
1852 (e.g., attribute access or related). */
1853 wide_int wr[2];
1854 bool static_array = false;
1855 if (tree ref = gimple_parm_array_size (ptr, wr, &static_array))
1857 pref->parmarray = !static_array;
1858 pref->sizrng[0] = offset_int::from (wr[0], UNSIGNED);
1859 pref->sizrng[1] = offset_int::from (wr[1], UNSIGNED);
1860 pref->ref = ref;
1861 qry->put_ref (ptr, *pref);
1862 return true;
1865 pref->set_max_size_range ();
1866 pref->base0 = false;
1867 pref->ref = ptr;
1868 qry->put_ref (ptr, *pref);
1869 return true;
1872 if (gimple_code (stmt) == GIMPLE_PHI)
1874 pref->ref = ptr;
1875 access_ref phi_ref = *pref;
1876 if (!pref->get_ref (NULL, &phi_ref, ostype, &snlim, qry))
1877 return false;
1878 *pref = phi_ref;
1879 pref->ref = ptr;
1880 qry->put_ref (ptr, *pref);
1881 return true;
1884 if (!is_gimple_assign (stmt))
1886 /* Clear BASE0 since the assigned pointer might point into
1887 the middle of the object, set the maximum size range and,
1888 if the SSA_NAME refers to a function argumnent, set
1889 PREF->REF to it. */
1890 pref->base0 = false;
1891 pref->set_max_size_range ();
1892 pref->ref = ptr;
1893 return true;
1896 tree_code code = gimple_assign_rhs_code (stmt);
1898 if (code == MAX_EXPR || code == MIN_EXPR)
1900 if (!handle_min_max_size (stmt, ostype, pref, snlim, qry))
1901 return false;
1902 qry->put_ref (ptr, *pref);
1903 return true;
1906 tree rhs = gimple_assign_rhs1 (stmt);
1908 if (code == ASSERT_EXPR)
1910 rhs = TREE_OPERAND (rhs, 0);
1911 return compute_objsize_r (rhs, ostype, pref, snlim, qry);
1914 if (code == POINTER_PLUS_EXPR
1915 && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
1917 /* Compute the size of the object first. */
1918 if (!compute_objsize_r (rhs, ostype, pref, snlim, qry))
1919 return false;
1921 offset_int orng[2];
1922 tree off = gimple_assign_rhs2 (stmt);
1923 if (get_offset_range (off, stmt, orng, rvals))
1924 pref->add_offset (orng[0], orng[1]);
1925 else
1926 pref->add_max_offset ();
1927 qry->put_ref (ptr, *pref);
1928 return true;
1931 if (code == ADDR_EXPR
1932 || code == SSA_NAME)
1933 return compute_objsize_r (rhs, ostype, pref, snlim, qry);
1935 /* (This could also be an assignment from a nonlocal pointer.) Save
1936 PTR to mention in diagnostics but otherwise treat it as a pointer
1937 to an unknown object. */
1938 pref->ref = rhs;
1939 pref->base0 = false;
1940 pref->set_max_size_range ();
1941 return true;
1944 /* Assume all other expressions point into an unknown object
1945 of the maximum valid size. */
1946 pref->ref = ptr;
1947 pref->base0 = false;
1948 pref->set_max_size_range ();
1949 if (TREE_CODE (ptr) == SSA_NAME)
1950 qry->put_ref (ptr, *pref);
1951 return true;
1954 /* A "public" wrapper around the above. Clients should use this overload
1955 instead. */
1957 tree
1958 compute_objsize (tree ptr, int ostype, access_ref *pref,
1959 range_query *rvals /* = NULL */)
1961 pointer_query qry;
1962 qry.rvals = rvals;
1963 ssa_name_limit_t snlim;
1964 if (!compute_objsize_r (ptr, ostype, pref, snlim, &qry))
1965 return NULL_TREE;
1967 offset_int maxsize = pref->size_remaining ();
1968 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
1969 pref->offrng[0] = 0;
1970 return wide_int_to_tree (sizetype, maxsize);
1973 /* Transitional wrapper. The function should be removed once callers
1974 transition to the pointer_query API. */
1976 tree
1977 compute_objsize (tree ptr, int ostype, access_ref *pref, pointer_query *ptr_qry)
1979 pointer_query qry;
1980 if (ptr_qry)
1981 ptr_qry->depth = 0;
1982 else
1983 ptr_qry = &qry;
1985 ssa_name_limit_t snlim;
1986 if (!compute_objsize_r (ptr, ostype, pref, snlim, ptr_qry))
1987 return NULL_TREE;
1989 offset_int maxsize = pref->size_remaining ();
1990 if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
1991 pref->offrng[0] = 0;
1992 return wide_int_to_tree (sizetype, maxsize);
1995 /* Legacy wrapper around the above. The function should be removed
1996 once callers transition to one of the two above. */
1998 tree
1999 compute_objsize (tree ptr, int ostype, tree *pdecl /* = NULL */,
2000 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
2002 /* Set the initial offsets to zero and size to negative to indicate
2003 none has been computed yet. */
2004 access_ref ref;
2005 tree size = compute_objsize (ptr, ostype, &ref, rvals);
2006 if (!size || !ref.base0)
2007 return NULL_TREE;
2009 if (pdecl)
2010 *pdecl = ref.ref;
2012 if (poff)
2013 *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);
2015 return size;