/* Definitions of the pointer_query and related classes.

   Copyright (C) 2020-2023 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
#include "coretypes.h"
#include "stringpool.h"
#include "diagnostic-core.h"
#include "fold-const.h"
#include "tree-object-size.h"
#include "tree-ssa-strlen.h"
#include "langhooks.h"
#include "stringpool.h"
#include "gimple-iterator.h"
#include "gimple-fold.h"
#include "gimple-ssa.h"
#include "attr-fnspec.h"
#include "gimple-range.h"
#include "pointer-query.h"
#include "tree-pretty-print.h"
#include "tree-ssanames.h"
static bool compute_objsize_r (tree, gimple *, bool, int, access_ref *,
                               ssa_name_limit_t &, pointer_query *);
/* Wrapper around the wide_int overload of get_range that accepts
   offset_int instead.  For middle end expressions returns the same
   result.  For a subset of nonconstant expressions emitted by the front
   end determines a more precise range than would be possible otherwise.  */

static bool
get_offset_range (tree x, gimple *stmt, offset_int r[2], range_query *rvals)
{
  offset_int add = 0;
  if (TREE_CODE (x) == PLUS_EXPR)
    {
      /* Handle constant offsets in pointer addition expressions seen
         in the front end IL.  */
      tree op = TREE_OPERAND (x, 1);
      if (TREE_CODE (op) == INTEGER_CST)
        {
          op = fold_convert (signed_type_for (TREE_TYPE (op)), op);
          add = wi::to_offset (op);
          x = TREE_OPERAND (x, 0);
        }
    }

  if (TREE_CODE (x) == NOP_EXPR)
    /* Also handle conversions to sizetype seen in the front end IL.  */
    x = TREE_OPERAND (x, 0);

  tree type = TREE_TYPE (x);
  if (!INTEGRAL_TYPE_P (type) && !POINTER_TYPE_P (type))
    return false;

  if (TREE_CODE (x) != INTEGER_CST
      && TREE_CODE (x) != SSA_NAME)
    {
      if (TYPE_UNSIGNED (type)
          && TYPE_PRECISION (type) == TYPE_PRECISION (sizetype))
        type = signed_type_for (type);

      r[0] = wi::to_offset (TYPE_MIN_VALUE (type)) + add;
      r[1] = wi::to_offset (TYPE_MAX_VALUE (type)) + add;
      return true;
    }

  wide_int wr[2];
  if (!get_range (x, stmt, wr, rvals))
    return false;

  signop sgn = SIGNED;
  /* Only convert signed integers or unsigned sizetype to a signed
     offset and avoid converting large positive values in narrower
     types to negative offsets.  */
  if (TYPE_UNSIGNED (type)
      && wr[0].get_precision () < TYPE_PRECISION (sizetype))
    sgn = UNSIGNED;

  r[0] = offset_int::from (wr[0], sgn);
  r[1] = offset_int::from (wr[1], sgn);
  return true;
}
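/* Illustrative sketch of how the callers below use this helper (the
   variable names are hypothetical):

     offset_int orng[2];
     if (!get_offset_range (off, stmt, orng, qry ? qry->rvals : NULL))
       {
         // No range is known; fall back to the maximum offset
         // representable in ptrdiff_t, as handle_array_ref and
         // handle_mem_ref do below.
         orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
         orng[0] = -orng[1] - 1;
       }
*/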
/* Return the argument that the call STMT to a built-in function returns
   or null if it doesn't.  On success, set OFFRNG[] to the range of offsets
   from the argument reflected in the value returned by the built-in if it
   can be determined, otherwise to 0 and HWI_M1U respectively.  Set
   *PAST_END for functions like mempcpy that might return a past the end
   pointer (most functions return a dereferenceable pointer to an existing
   element of an array).  */

static tree
gimple_call_return_array (gimple *stmt, offset_int offrng[2], bool *past_end,
                          ssa_name_limit_t &snlim, pointer_query *qry)
{
  /* Clear and set below for the rare function(s) that might return
     a past-the-end pointer.  */
  *past_end = false;

  {
    /* Check for attribute fn spec to see if the function returns one
       of its arguments.  */
    attr_fnspec fnspec = gimple_call_fnspec (as_a <gcall *>(stmt));
    unsigned int argno;
    if (fnspec.returns_arg (&argno))
      {
        /* Functions return the first argument (not a range).  */
        offrng[0] = offrng[1] = 0;
        return gimple_call_arg (stmt, argno);
      }
  }

  if (gimple_call_num_args (stmt) < 1)
    return NULL_TREE;

  tree fn = gimple_call_fndecl (stmt);
  if (!gimple_call_builtin_p (stmt, BUILT_IN_NORMAL))
    {
      /* See if this is a call to placement new.  */
      if (!fn
          || !DECL_IS_OPERATOR_NEW_P (fn)
          || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn))
        return NULL_TREE;

      /* Check the mangling, keeping in mind that operator new takes
         a size_t which could be unsigned int or unsigned long.  */
      tree fname = DECL_ASSEMBLER_NAME (fn);
      if (!id_equal (fname, "_ZnwjPv")       // ordinary form
          && !id_equal (fname, "_ZnwmPv")    // ordinary form
          && !id_equal (fname, "_ZnajPv")    // array form
          && !id_equal (fname, "_ZnamPv"))   // array form
        return NULL_TREE;

      if (gimple_call_num_args (stmt) != 2)
        return NULL_TREE;

      /* Allocation functions return a pointer to the beginning.  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 1);
    }

  switch (DECL_FUNCTION_CODE (fn))
    {
    case BUILT_IN_MEMCPY:
    case BUILT_IN_MEMCPY_CHK:
    case BUILT_IN_MEMMOVE:
    case BUILT_IN_MEMMOVE_CHK:
    case BUILT_IN_MEMSET:
    case BUILT_IN_STRCAT:
    case BUILT_IN_STRCAT_CHK:
    case BUILT_IN_STRCPY:
    case BUILT_IN_STRCPY_CHK:
    case BUILT_IN_STRNCAT:
    case BUILT_IN_STRNCAT_CHK:
    case BUILT_IN_STRNCPY:
    case BUILT_IN_STRNCPY_CHK:
      /* Functions return the first argument (not a range).  */
      offrng[0] = offrng[1] = 0;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_MEMPCPY:
    case BUILT_IN_MEMPCPY_CHK:
      {
        /* The returned pointer is in a range constrained by the smaller
           of the upper bound of the size argument and the source object
           size.  */
        offrng[0] = 0;
        offrng[1] = HOST_WIDE_INT_M1U;
        tree off = gimple_call_arg (stmt, 2);
        bool off_valid = get_offset_range (off, stmt, offrng, qry->rvals);
        if (!off_valid || offrng[0] != offrng[1])
          {
            /* If the offset is either indeterminate or in some range,
               try to constrain its upper bound to at most the size
               of the source object.  */
            access_ref aref;
            tree src = gimple_call_arg (stmt, 1);
            if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
                && aref.sizrng[1] < offrng[1])
              offrng[1] = aref.sizrng[1];
          }

        /* Mempcpy may return a past-the-end pointer.  */
        *past_end = true;
        return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_MEMCHR:
      {
        tree off = gimple_call_arg (stmt, 2);
        if (get_offset_range (off, stmt, offrng, qry->rvals))
          offrng[1] -= 1;
        else
          offrng[1] = HOST_WIDE_INT_M1U;

        offrng[0] = 0;
        return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STRCHR:
    case BUILT_IN_STRRCHR:
    case BUILT_IN_STRSTR:
      offrng[0] = 0;
      offrng[1] = HOST_WIDE_INT_M1U;
      return gimple_call_arg (stmt, 0);

    case BUILT_IN_STPCPY:
    case BUILT_IN_STPCPY_CHK:
      {
        access_ref aref;
        tree src = gimple_call_arg (stmt, 1);
        if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry))
          offrng[1] = aref.sizrng[1] - 1;
        else
          offrng[1] = HOST_WIDE_INT_M1U;

        offrng[0] = 0;
        return gimple_call_arg (stmt, 0);
      }

    case BUILT_IN_STPNCPY:
    case BUILT_IN_STPNCPY_CHK:
      {
        /* The returned pointer is in a range between the first argument
           and it plus the smaller of the upper bound of the size argument
           and the source object size.  */
        offrng[1] = HOST_WIDE_INT_M1U;
        tree off = gimple_call_arg (stmt, 2);
        if (!get_offset_range (off, stmt, offrng, qry->rvals)
            || offrng[0] != offrng[1])
          {
            /* If the offset is either indeterminate or in some range,
               try to constrain its upper bound to at most the size
               of the source object.  */
            access_ref aref;
            tree src = gimple_call_arg (stmt, 1);
            if (compute_objsize_r (src, stmt, false, 1, &aref, snlim, qry)
                && aref.sizrng[1] < offrng[1])
              offrng[1] = aref.sizrng[1];
          }

        /* When the source is the empty string the returned pointer is
           a copy of the argument.  Otherwise stpcpy can also return
           a past-the-end pointer.  */
        offrng[0] = 0;
        *past_end = true;
        return gimple_call_arg (stmt, 0);
      }

    default:
      break;
    }

  return NULL_TREE;
}
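/* A minimal usage sketch (hypothetical caller): for a call such as
   p = mempcpy (d, s, n) the function returns D, sets OFFRNG to the byte
   range the result may lie past D, and notes via *PAST_END that the
   result may point one past the end:

     bool past_end = false;
     offset_int offrng[2];
     if (tree ret = gimple_call_return_array (stmt, offrng, &past_end,
                                              snlim, qry))
       {
         // RET is the argument the call returns a pointer into;
         // OFFRNG[0..1] bound the offset added to it.
       }
*/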
/* Return true when EXP's range can be determined and set RANGE[] to it
   after adjusting it if necessary to make EXP represent a valid size
   of object, or a valid size argument to an allocation function declared
   with attribute alloc_size (whose argument may be signed), or to a string
   manipulation function like memset.
   When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
   a size in an anti-range [1, N] where N > PTRDIFF_MAX.  A zero range is
   a (nearly) invalid argument to allocation functions like malloc but it
   is a valid argument to functions like memset.
   When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
   in a multi-range, otherwise to the smallest valid subrange.  */

bool
get_size_range (range_query *query, tree exp, gimple *stmt, tree range[2],
                int flags /* = 0 */)
{
  if (tree_fits_uhwi_p (exp))
    {
      /* EXP is a constant.  */
      range[0] = range[1] = exp;
      return true;
    }

  tree exptype = TREE_TYPE (exp);
  bool integral = INTEGRAL_TYPE_P (exptype);

  wide_int min, max;
  enum value_range_kind range_type;

  if (!query)
    query = get_range_query (cfun);

  if (integral)
    {
      value_range vr;
      tree tmin, tmax;

      query->range_of_expr (vr, exp, stmt);

      if (vr.undefined_p ())
        vr.set_varying (TREE_TYPE (exp));
      range_type = get_legacy_range (vr, tmin, tmax);
      min = wi::to_wide (tmin);
      max = wi::to_wide (tmax);
    }
  else
    range_type = VR_VARYING;

  if (range_type == VR_VARYING)
    {
      if (integral)
        {
          /* Use the full range of the type of the expression when
             no value range information is available.  */
          range[0] = TYPE_MIN_VALUE (exptype);
          range[1] = TYPE_MAX_VALUE (exptype);
          return true;
        }

      range[0] = NULL_TREE;
      range[1] = NULL_TREE;
      return false;
    }

  unsigned expprec = TYPE_PRECISION (exptype);

  bool signed_p = !TYPE_UNSIGNED (exptype);

  if (range_type == VR_ANTI_RANGE)
    {
      if (signed_p)
        {
          if (wi::les_p (max, 0))
            {
              /* EXP is not in a strictly negative range.  That means
                 it must be in some (not necessarily strictly) positive
                 range which includes zero.  Since in signed to unsigned
                 conversions negative values end up converted to large
                 positive values, and otherwise they are not valid sizes,
                 the resulting range is in both cases [0, TYPE_MAX].  */
              min = wi::zero (expprec);
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else if (wi::les_p (min - 1, 0))
            {
              /* EXP is not in a negative-positive range.  That means EXP
                 is either negative, or greater than max.  Since negative
                 sizes are invalid make the range [MAX + 1, TYPE_MAX].  */
              min = max + 1;
              max = wi::to_wide (TYPE_MAX_VALUE (exptype));
            }
          else
            {
              max = min - 1;
              min = wi::zero (expprec);
            }
        }
      else
        {
          wide_int maxsize = wi::to_wide (max_object_size ());
          min = wide_int::from (min, maxsize.get_precision (), UNSIGNED);
          max = wide_int::from (max, maxsize.get_precision (), UNSIGNED);
          if (wi::eq_p (0, min - 1))
            {
              /* EXP is unsigned and not in the range [1, MAX].  That means
                 it's either zero or greater than MAX.  Even though 0 would
                 normally be detected by -Walloc-zero, unless ALLOW_ZERO
                 is set, set the range to [MAX, TYPE_MAX] so that when MAX
                 is greater than the limit the whole range is diagnosed.  */
              wide_int maxsize = wi::to_wide (max_object_size ());
              if (flags & SR_ALLOW_ZERO)
                {
                  if (wi::leu_p (maxsize, max + 1)
                      || !(flags & SR_USE_LARGEST))
                    min = max = wi::zero (expprec);
                  else
                    {
                      min = max + 1;
                      max = wi::to_wide (TYPE_MAX_VALUE (exptype));
                    }
                }
              else
                {
                  min = max + 1;
                  max = wi::to_wide (TYPE_MAX_VALUE (exptype));
                }
            }
          else if ((flags & SR_USE_LARGEST)
                   && wi::ltu_p (max + 1, maxsize))
            {
              /* When USE_LARGEST is set and the larger of the two subranges
                 is a valid size, use it...  */
              min = max + 1;
              max = maxsize;
            }
          else
            {
              /* ...otherwise use the smaller subrange.  */
              max = min - 1;
              min = wi::zero (expprec);
            }
        }
    }

  range[0] = wide_int_to_tree (exptype, min);
  range[1] = wide_int_to_tree (exptype, max);
  return true;
}

bool
get_size_range (tree exp, tree range[2], int flags /* = 0 */)
{
  return get_size_range (/*query=*/NULL, exp, /*stmt=*/NULL, range, flags);
}
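/* A minimal usage sketch (SIZE is a hypothetical size argument of a call
   being checked): obtain the range of values SIZE may take on, allowing
   zero as memset and similar functions do:

     tree range[2];
     if (get_size_range (size, range, SR_ALLOW_ZERO))
       {
         // RANGE[0] and RANGE[1] are INTEGER_CSTs bounding SIZE.
       }
*/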
/* If STMT is a call to an allocation function, returns the constant
   maximum size of the object allocated by the call represented as
   sizetype.  If nonnull, sets RNG1[] to the range of the size.
   When nonnull, uses RVALS for range information, otherwise gets global
   range info.
   Returns null when STMT is not a call to a valid allocation function.  */

tree
gimple_call_alloc_size (gimple *stmt, wide_int rng1[2] /* = NULL */,
                        range_query *qry /* = NULL */)
{
  if (!stmt || !is_gimple_call (stmt))
    return NULL_TREE;

  tree allocfntype;
  if (tree fndecl = gimple_call_fndecl (stmt))
    allocfntype = TREE_TYPE (fndecl);
  else
    allocfntype = gimple_call_fntype (stmt);

  if (!allocfntype)
    return NULL_TREE;

  unsigned argidx1 = UINT_MAX, argidx2 = UINT_MAX;
  tree at = lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype));
  if (!at)
    {
      if (!gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
        return NULL_TREE;

      argidx1 = 0;
    }

  unsigned nargs = gimple_call_num_args (stmt);

  if (argidx1 == UINT_MAX)
    {
      tree atval = TREE_VALUE (at);
      if (!atval)
        return NULL_TREE;

      argidx1 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
      if (nargs <= argidx1)
        return NULL_TREE;

      atval = TREE_CHAIN (atval);
      if (atval)
        {
          argidx2 = TREE_INT_CST_LOW (TREE_VALUE (atval)) - 1;
          if (nargs <= argidx2)
            return NULL_TREE;
        }
    }

  tree size = gimple_call_arg (stmt, argidx1);

  wide_int rng1_buf[2];
  /* If RNG1 is not set, use the buffer.  */
  if (!rng1)
    rng1 = rng1_buf;

  /* Use maximum precision to avoid overflow below.  */
  const int prec = ADDR_MAX_PRECISION;

  {
    tree r[2];
    /* Determine the largest valid range size, including zero.  */
    if (!get_size_range (qry, size, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng1[0] = wi::to_wide (r[0], prec);
    rng1[1] = wi::to_wide (r[1], prec);
  }

  if (argidx2 > nargs && TREE_CODE (size) == INTEGER_CST)
    return fold_convert (sizetype, size);

  /* To handle ranges do the math in wide_int and return the product
     of the upper bounds as a constant.  Ignore anti-ranges.  */
  tree n = argidx2 < nargs ? gimple_call_arg (stmt, argidx2) : integer_one_node;
  wide_int rng2[2];
  {
    tree r[2];
    /* As above, use the full non-negative range on failure.  */
    if (!get_size_range (qry, n, stmt, r, SR_ALLOW_ZERO | SR_USE_LARGEST))
      return NULL_TREE;
    rng2[0] = wi::to_wide (r[0], prec);
    rng2[1] = wi::to_wide (r[1], prec);
  }

  /* Compute products of both bounds for the caller but return the lesser
     of SIZE_MAX and the product of the upper bounds as a constant.  */
  rng1[0] = rng1[0] * rng2[0];
  rng1[1] = rng1[1] * rng2[1];

  const tree size_max = TYPE_MAX_VALUE (sizetype);
  if (wi::gtu_p (rng1[1], wi::to_wide (size_max, prec)))
    {
      rng1[1] = wi::to_wide (size_max, prec);
    }

  return wide_int_to_tree (sizetype, rng1[1]);
}
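/* A minimal usage sketch (ALLOC_STMT is a hypothetical call defining an
   allocated pointer): for p = malloc (n) or q = calloc (n, m) this yields
   the constant upper bound on the allocation as a sizetype constant and,
   optionally, the full size range:

     wide_int rng[2];
     if (tree maxsize = gimple_call_alloc_size (alloc_stmt, rng))
       {
         // MAXSIZE is the upper bound; RNG[0..1] is the size range.
       }
*/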
/* For an access to an object referenced to by the function parameter PTR
   of pointer type, set RNG[] to the range of sizes of the object
   obtained from the attribute access specification for the current function.
   Set STATIC_ARRAY if the array parameter has been declared [static].
   Return the function parameter on success and null otherwise.  */

static tree
gimple_parm_array_size (tree ptr, wide_int rng[2],
                        bool *static_array /* = NULL */)
{
  /* For a function argument try to determine the byte size of the array
     from the current function declaration (e.g., attribute access or
     related).  */
  tree var = SSA_NAME_VAR (ptr);
  if (TREE_CODE (var) != PARM_DECL || !POINTER_TYPE_P (TREE_TYPE (var)))
    return NULL_TREE;

  const unsigned prec = TYPE_PRECISION (sizetype);

  rdwr_map rdwr_idx;
  attr_access *access = get_parm_access (rdwr_idx, var);
  if (!access)
    return NULL_TREE;

  if (access->sizarg != UINT_MAX)
    {
      /* TODO: Try to extract the range from the argument based on
         those of subsequent assertions or based on known calls to
         the current function.  */
      return NULL_TREE;
    }

  if (!access->minsize)
    return NULL_TREE;

  /* Only consider ordinary array bound at level 2 (or above if it's
     declared [static]).  */
  if (warn_array_parameter < 2 && !access->static_p)
    return NULL_TREE;

  if (static_array)
    *static_array = access->static_p;

  rng[0] = wi::zero (prec);
  rng[1] = wi::uhwi (access->minsize, prec);
  /* Multiply the array bound encoded in the attribute by the size
     of what the pointer argument to which it decays points to.  */
  tree eltype = TREE_TYPE (TREE_TYPE (ptr));
  tree size = TYPE_SIZE_UNIT (eltype);
  if (!size || TREE_CODE (size) != INTEGER_CST)
    return NULL_TREE;

  rng[1] *= wi::to_wide (size, prec);
  return var;
}
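/* A minimal usage sketch (PTR is a hypothetical SSA_NAME for a parameter
   declared like void f (int a[static 8])): the byte-size range of the
   array the parameter decays from is returned in RNG:

     wide_int rng[2];
     bool static_array = false;
     if (tree parm = gimple_parm_array_size (ptr, rng, &static_array))
       {
         // PARM is the PARM_DECL; RNG[1] here is 8 * sizeof (int).
       }
*/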
/* Initialize the object.  */

access_ref::access_ref ()
  : ref (), eval ([](tree x){ return x; }), deref (), ref_nullptr_p (false),
    trail1special (true), base0 (true), parmarray ()
{
  offrng[0] = offrng[1] = 0;
  offmax[0] = offmax[1] = 0;
  sizrng[0] = sizrng[1] = -1;
}
/* Return the PHI node REF refers to or null if it doesn't.  */

gphi *
access_ref::phi () const
{
  if (!ref || TREE_CODE (ref) != SSA_NAME)
    return NULL;

  gimple *def_stmt = SSA_NAME_DEF_STMT (ref);
  if (!def_stmt || gimple_code (def_stmt) != GIMPLE_PHI)
    return NULL;

  return as_a <gphi *> (def_stmt);
}
/* Determine the size and offset for ARG, append it to ALL_REFS, and
   merge the result with *THIS.  Ignore ARG if SKIP_NULL is set and
   ARG refers to the null pointer.  Return true on success and false
   on failure.  */

bool
access_ref::merge_ref (vec<access_ref> *all_refs, tree arg, gimple *stmt,
                       int ostype, bool skip_null,
                       ssa_name_limit_t &snlim, pointer_query &qry)
{
  access_ref aref;
  if (!compute_objsize_r (arg, stmt, false, ostype, &aref, snlim, &qry)
      || aref.sizrng[0] < 0)
    {
      /* This may be a PHI with all null pointer arguments.  Handle it
         conservatively by setting all properties to the most permissive
         values.  */
      base0 = false;
      offrng[0] = offrng[1] = 0;
      add_max_offset ();
      set_max_size_range ();
      return false;
    }

  if (all_refs)
    {
      access_ref dummy_ref;
      aref.get_ref (all_refs, &dummy_ref, ostype, &snlim, &qry);

      if (TREE_CODE (arg) == SSA_NAME)
        qry.put_ref (arg, aref, ostype);

      all_refs->safe_push (aref);
    }

  bool merged_parmarray = aref.parmarray;

  const bool nullp = skip_null && integer_zerop (arg);
  const offset_int maxobjsize = wi::to_offset (max_object_size ());
  offset_int minsize = sizrng[0];

  if (sizrng[0] < 0)
    {
      /* If *THIS doesn't contain a meaningful result yet set it to AREF
         unless the argument is null and it's okay to ignore it.  */
      if (!nullp)
        *this = aref;

      /* Set if the current argument refers to one or more objects of
         known size (or range of sizes), as opposed to referring to
         one or more unknown object(s).  */
      const bool arg_known_size = (aref.sizrng[0] != 0
                                   || aref.sizrng[1] != maxobjsize);
      if (arg_known_size)
        sizrng[0] = aref.sizrng[0];

      return true;
    }

  /* Disregard null pointers in PHIs with two or more arguments.
     TODO: Handle this better!  */
  if (nullp)
    return true;

  const bool known_size = (sizrng[0] != 0 || sizrng[1] != maxobjsize);

  if (known_size && aref.sizrng[0] < minsize)
    minsize = aref.sizrng[0];

  /* Extend the size and offset of *THIS to account for AREF.  The result
     can be cached but results in false negatives.  */

  offset_int orng[2];
  if (sizrng[1] < aref.sizrng[1])
    {
      orng[0] = offrng[0];
      orng[1] = offrng[1];
      *this = aref;
    }
  else
    {
      orng[0] = aref.offrng[0];
      orng[1] = aref.offrng[1];
    }

  if (orng[0] < offrng[0])
    offrng[0] = orng[0];
  if (offrng[1] < orng[1])
    offrng[1] = orng[1];

  /* Reset the PHI's BASE0 flag if any of the nonnull arguments
     refers to an object at an unknown offset.  */
  if (!aref.base0)
    base0 = false;

  sizrng[0] = minsize;
  parmarray = merged_parmarray;

  return true;
}
/* Determine and return the largest object to which *THIS refers.  If
   *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
   of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
   argument ARG.  */

tree
access_ref::get_ref (vec<access_ref> *all_refs,
                     access_ref *pref /* = NULL */,
                     int ostype /* = 1 */,
                     ssa_name_limit_t *psnlim /* = NULL */,
                     pointer_query *qry /* = NULL */) const
{
  if (!ref || TREE_CODE (ref) != SSA_NAME)
    return NULL_TREE;

  /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
     cause unbounded recursion.  */
  ssa_name_limit_t snlim_buf;
  if (!psnlim)
    psnlim = &snlim_buf;

  pointer_query empty_qry;
  if (!qry)
    qry = &empty_qry;

  if (gimple *def_stmt = SSA_NAME_DEF_STMT (ref))
    {
      if (is_gimple_assign (def_stmt))
        {
          tree_code code = gimple_assign_rhs_code (def_stmt);
          if (code != MIN_EXPR && code != MAX_EXPR)
            return NULL_TREE;

          access_ref aref;
          tree arg1 = gimple_assign_rhs1 (def_stmt);
          aref.merge_ref (all_refs, arg1, def_stmt, ostype, false,
                          *psnlim, *qry);

          tree arg2 = gimple_assign_rhs2 (def_stmt);
          aref.merge_ref (all_refs, arg2, def_stmt, ostype, false,
                          *psnlim, *qry);

          if (pref && pref != this)
            {
              tree ref = pref->ref;
              *pref = aref;
              pref->ref = ref;
            }

          return aref.ref;
        }
    }

  gphi *phi_stmt = this->phi ();
  if (!phi_stmt)
    return ref;

  if (!psnlim->visit_phi (ref))
    return NULL_TREE;

  /* The conservative result of the PHI reflecting the offset and size
     of the largest PHI argument, regardless of whether or not they all
     refer to the same object.  */
  access_ref phi_ref;
  if (pref)
    {
      /* The identity of the object has not been determined yet but
         PREF->REF is set by the caller to the PHI for convenience.
         The size is negative/invalid and the offset is zero (it's
         updated only after the identity of the object has been
         determined).  */
      gcc_assert (pref->sizrng[0] < 0);
      gcc_assert (pref->offrng[0] == 0 && pref->offrng[1] == 0);
      phi_ref = *pref;
    }

  const offset_int maxobjsize = wi::to_offset (max_object_size ());
  const unsigned nargs = gimple_phi_num_args (phi_stmt);
  for (unsigned i = 0; i < nargs; ++i)
    {
      access_ref phi_arg_ref;
      bool skip_null = i || i + 1 < nargs;
      tree arg = gimple_phi_arg_def (phi_stmt, i);
      phi_ref.merge_ref (all_refs, arg, phi_stmt, ostype, skip_null,
                         *psnlim, *qry);

      if (!phi_ref.base0
          && phi_ref.sizrng[0] == 0
          && phi_ref.sizrng[1] >= maxobjsize)
        /* When an argument results in the most permissive result,
           the remaining arguments cannot constrain it.  Short-circuit
           the evaluation.  */
        break;
    }

  if (phi_ref.sizrng[0] < 0)
    {
      /* Fail if none of the PHI's arguments resulted in updating PHI_REF
         (perhaps because they have all been already visited by prior
         recursive calls).  */
      psnlim->leave_phi (ref);
      return NULL_TREE;
    }

  /* Avoid changing *THIS.  */
  if (pref && pref != this)
    {
      /* Keep the SSA_NAME of the PHI unchanged so that all PHI arguments
         can be referred to later if necessary.  This is useful even if
         they all refer to the same object.  */
      tree ref = pref->ref;
      *pref = phi_ref;
      pref->ref = ref;
    }

  psnlim->leave_phi (ref);

  return phi_ref.ref;
}
/* Return the maximum amount of space remaining and if non-null, set
   argument to the minimum.  */

offset_int
access_ref::size_remaining (offset_int *pmin /* = NULL */) const
{
  offset_int minbuf;
  if (!pmin)
    pmin = &minbuf;

  if (sizrng[0] < 0)
    {
      /* If the identity of the object hasn't been determined return
         the maximum size range.  */
      *pmin = 0;
      return wi::to_offset (max_object_size ());
    }

  /* add_offset() ensures the offset range isn't inverted.  */
  gcc_checking_assert (offrng[0] <= offrng[1]);

  if (base0)
    {
      /* The offset into referenced object is zero-based (i.e., it's
         not referenced by a pointer into middle of some unknown object).  */
      if (offrng[0] < 0 && offrng[1] < 0)
        {
          /* If the offset is negative the remaining size is zero.  */
          *pmin = 0;
          return 0;
        }

      if (sizrng[1] <= offrng[0])
        {
          /* If the starting offset is greater than or equal to the upper
             bound on the size of the object, the space remaining is zero.
             As a special case, if it's equal, set *PMIN to -1 to let
             the caller know the offset is valid and just past the end.  */
          *pmin = sizrng[1] == offrng[0] ? -1 : 0;
          return 0;
        }

      /* Otherwise return the size minus the lower bound of the offset.  */
      offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

      *pmin = sizrng[0] - or0;
      return sizrng[1] - or0;
    }

  /* The offset to the referenced object isn't zero-based (i.e., it may
     refer to a byte other than the first).  The size of such an object
     is constrained only by the size of the address space (the result
     of max_object_size()).  */
  if (sizrng[1] <= offrng[0])
    {
      *pmin = 0;
      return 0;
    }

  offset_int or0 = offrng[0] < 0 ? 0 : offrng[0];

  *pmin = sizrng[0] - or0;
  return sizrng[1] - or0;
}
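/* A minimal usage sketch: given an access_ref AREF describing a store,
   the bytes left between the lower bound of the offset and the size of
   the object are:

     offset_int remmin;
     offset_int remmax = aref.size_remaining (&remmin);

   A REMMIN of -1 signals an offset exactly one past the end.  */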
/* Return true if the offset and object size are in range for SIZE.  */

bool
access_ref::offset_in_range (const offset_int &size) const
{
  if (size_remaining () < size)
    return false;

  if (base0)
    return offmax[0] >= 0 && offmax[1] <= sizrng[1];

  offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
  return offmax[0] > -maxoff && offmax[1] < maxoff;
}
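/* A minimal usage sketch: checking whether accessing SIZE bytes through
   AREF stays within the referenced object:

     if (!aref.offset_in_range (size))
       {
         // The access is (or may be) out of bounds; diagnose it.
       }
*/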
/* Add the range [MIN, MAX] to the offset range.  For known objects (with
   zero-based offsets) at least one of whose offset's bounds is in range,
   constrain the other (or both) to the bounds of the object (i.e., zero
   and the upper bound of its size).  This improves the quality of
   diagnostics.  */

void access_ref::add_offset (const offset_int &min, const offset_int &max)
{
  if (min <= max)
    {
      /* To add an ordinary range just add it to the bounds.  */
      offrng[0] += min;
      offrng[1] += max;
    }
  else if (!base0)
    {
      /* To add an inverted range to an offset to an unknown object
         expand it to the maximum.  */
      add_max_offset ();
    }
  else
    {
      /* To add an inverted range to an offset to a known object set
         the upper bound to the maximum representable offset value
         (which may be greater than MAX_OBJECT_SIZE).
         The lower bound is either the sum of the current offset and
         MIN when abs(MAX) is greater than the former, or zero otherwise.
         Zero because then the inverted range includes the negative of
         the lower bound.  */
      offset_int maxoff = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      offrng[1] = maxoff;

      if (max >= 0)
        {
          offrng[0] = 0;
          if (offmax[0] > 0)
            offmax[0] = 0;
          return;
        }

      offset_int absmax = wi::abs (max);
      if (offrng[0] < absmax)
        {
          offrng[0] += min;
          /* Cap the lower bound at the upper (set to MAXOFF above)
             to avoid inadvertently recreating an inverted range.  */
          if (offrng[1] < offrng[0])
            offrng[0] = offrng[1];
        }
      else
        offrng[0] = 0;
    }

  /* Set the minimum and maximum computed so far.  */
  if (offrng[1] < 0 && offrng[1] < offmax[0])
    offmax[0] = offrng[1];
  if (offrng[0] > 0 && offrng[0] > offmax[1])
    offmax[1] = offrng[0];

  if (!base0)
    return;

  /* When referencing a known object check to see if the offset computed
     so far is in bounds... */
  offset_int remrng[2];
  remrng[1] = size_remaining (remrng);
  if (remrng[1] > 0 || remrng[0] < 0)
    {
      /* ...if so, constrain it so that neither bound exceeds the size of
         the object.  Out of bounds offsets are left unchanged, and, for
         better or worse, become in bounds later.  They should be detected
         and diagnosed at the point they first become invalid by
         -Warray-bounds.  */
      if (offrng[0] < 0)
        offrng[0] = 0;
      if (offrng[1] > sizrng[1])
        offrng[1] = sizrng[1];
    }
}
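/* A minimal usage sketch: folding the byte offset of a POINTER_PLUS_EXPR
   into an access_ref, as handle_ssa_name does below:

     offset_int orng[2];
     if (get_offset_range (off, stmt, orng, rvals))
       pref->add_offset (orng[0], orng[1]);
     else
       pref->add_max_offset ();
*/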
/* Issue one inform message describing each target of an access REF.
   WRITE is set for a write access and clear for a read access.  */

void
access_ref::inform_access (access_mode mode, int ostype /* = 1 */) const
{
  const access_ref &aref = *this;
  if (!aref.ref)
    return;

  if (phi ())
    {
      /* Set MAXREF to refer to the largest object and fill ALL_REFS
         with data for all objects referenced by the PHI arguments.  */
      access_ref maxref;
      auto_vec<access_ref> all_refs;
      if (!get_ref (&all_refs, &maxref, ostype))
        return;

      if (all_refs.length ())
        {
          /* Except for MAXREF, the rest of the arguments' offsets need not
             reflect one added to the PHI itself.  Determine the latter from
             MAXREF on which the result is based.  */
          const offset_int orng[] =
            {
              offrng[0] - maxref.offrng[0],
              wi::smax (offrng[1] - maxref.offrng[1], offrng[0]),
            };

          /* Add the final PHI's offset to that of each of the arguments
             and recurse to issue an inform message for it.  */
          for (unsigned i = 0; i != all_refs.length (); ++i)
            {
              /* Skip any PHIs; those could lead to infinite recursion.  */
              if (all_refs[i].phi ())
                continue;

              all_refs[i].add_offset (orng[0], orng[1]);
              all_refs[i].inform_access (mode, ostype);
            }
        }
      return;
    }

  /* Convert offset range and avoid including a zero range since it
     isn't necessarily meaningful.  */
  HOST_WIDE_INT diff_min = tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT diff_max = tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node));
  HOST_WIDE_INT minoff;
  HOST_WIDE_INT maxoff = diff_max;
  if (wi::fits_shwi_p (aref.offrng[0]))
    minoff = aref.offrng[0].to_shwi ();
  else
    minoff = aref.offrng[0] < 0 ? diff_min : diff_max;

  if (wi::fits_shwi_p (aref.offrng[1]))
    maxoff = aref.offrng[1].to_shwi ();

  if (maxoff <= diff_min || maxoff >= diff_max)
    /* Avoid mentioning an upper bound that's equal to or in excess
       of the maximum of ptrdiff_t.  */
    maxoff = minoff;

  /* Convert size range and always include it since all sizes are
     meaningful.  */
  unsigned long long minsize = 0, maxsize = 0;
  if (wi::fits_shwi_p (aref.sizrng[0])
      && wi::fits_shwi_p (aref.sizrng[1]))
    {
      minsize = aref.sizrng[0].to_shwi ();
      maxsize = aref.sizrng[1].to_shwi ();
    }

  /* SIZRNG doesn't necessarily have the same range as the allocation
     size determined by gimple_call_alloc_size ().  */
  char sizestr[80];
  if (minsize == maxsize)
    sprintf (sizestr, "%llu", minsize);
  else
    sprintf (sizestr, "[%llu, %llu]", minsize, maxsize);

  char offstr[80];
  if (minoff == 0
      && (maxoff == 0 || aref.sizrng[1] <= maxoff))
    offstr[0] = '\0';
  else if (minoff == maxoff)
    sprintf (offstr, "%lli", (long long) minoff);
  else
    sprintf (offstr, "[%lli, %lli]", (long long) minoff, (long long) maxoff);

  location_t loc = UNKNOWN_LOCATION;

  tree ref = this->ref;
  tree allocfn = NULL_TREE;
  if (TREE_CODE (ref) == SSA_NAME)
    {
      gimple *stmt = SSA_NAME_DEF_STMT (ref);
      if (!stmt)
        return;

      if (is_gimple_call (stmt))
        {
          loc = gimple_location (stmt);
          if (gimple_call_builtin_p (stmt, BUILT_IN_ALLOCA_WITH_ALIGN))
            {
              /* Strip the SSA_NAME suffix from the variable name and
                 recreate an identifier with the VLA's original name.  */
              ref = gimple_call_lhs (stmt);
              if (SSA_NAME_IDENTIFIER (ref))
                {
                  ref = SSA_NAME_IDENTIFIER (ref);
                  const char *id = IDENTIFIER_POINTER (ref);
                  size_t len = strcspn (id, ".$");
                  if (!len)
                    len = strlen (id);
                  ref = get_identifier_with_length (id, len);
                }
            }
          else
            {
              /* Except for VLAs, retrieve the allocation function.  */
              allocfn = gimple_call_fndecl (stmt);
              if (!allocfn)
                allocfn = gimple_call_fn (stmt);
              if (TREE_CODE (allocfn) == SSA_NAME)
                {
                  /* For an ALLOC_CALL via a function pointer make a small
                     effort to determine the destination of the pointer.  */
                  gimple *def = SSA_NAME_DEF_STMT (allocfn);
                  if (gimple_assign_single_p (def))
                    {
                      tree rhs = gimple_assign_rhs1 (def);
                      if (DECL_P (rhs))
                        allocfn = rhs;
                      else if (TREE_CODE (rhs) == COMPONENT_REF)
                        allocfn = TREE_OPERAND (rhs, 1);
                    }
                }
            }
        }
      else if (gimple_nop_p (stmt))
        /* Handle DECL_PARM below.  */
        ref = SSA_NAME_VAR (ref);
      else if (is_gimple_assign (stmt)
               && (gimple_assign_rhs_code (stmt) == MIN_EXPR
                   || gimple_assign_rhs_code (stmt) == MAX_EXPR))
        {
          /* MIN or MAX_EXPR here implies a reference to a known object
             and either an unknown or distinct one (the latter being
             the result of an invalid relational expression).  Determine
             the identity of the former and point to it in the note.
             TODO: Consider merging with PHI handling.  */
          access_ref arg_ref[2];
          tree arg = gimple_assign_rhs1 (stmt);
          compute_objsize (arg, /* ostype = */ 1 , &arg_ref[0]);
          arg = gimple_assign_rhs2 (stmt);
          compute_objsize (arg, /* ostype = */ 1 , &arg_ref[1]);

          /* Use the argument that references a known object with more
             space remaining.  */
          const bool idx
            = (!arg_ref[0].ref || !arg_ref[0].base0
               || (arg_ref[0].base0 && arg_ref[1].base0
                   && (arg_ref[0].size_remaining ()
                       < arg_ref[1].size_remaining ())));

          arg_ref[idx].offrng[0] = offrng[0];
          arg_ref[idx].offrng[1] = offrng[1];
          arg_ref[idx].inform_access (mode);
          return;
        }
    }

  if (DECL_P (ref))
    loc = DECL_SOURCE_LOCATION (ref);
  else if (EXPR_P (ref) && EXPR_HAS_LOCATION (ref))
    loc = EXPR_LOCATION (ref);
  else if (TREE_CODE (ref) != IDENTIFIER_NODE
           && TREE_CODE (ref) != SSA_NAME)
    {
      if (TREE_CODE (ref) == INTEGER_CST && ref_nullptr_p)
        {
          if (mode == access_read_write || mode == access_write_only)
            inform (loc, "destination object is likely at address zero");
          else
            inform (loc, "source object is likely at address zero");
        }
      return;
    }

  if (mode == access_read_write || mode == access_write_only)
    {
      if (allocfn == NULL_TREE)
        {
          if (*offstr)
            inform (loc, "at offset %s into destination object %qE of size %s",
                    offstr, ref, sizestr);
          else
            inform (loc, "destination object %qE of size %s", ref, sizestr);
          return;
        }

      if (*offstr)
        inform (loc,
                "at offset %s into destination object of size %s "
                "allocated by %qE", offstr, sizestr, allocfn);
      else
        inform (loc, "destination object of size %s allocated by %qE",
                sizestr, allocfn);
      return;
    }

  if (mode == access_read_only)
    {
      if (allocfn == NULL_TREE)
        {
          if (*offstr)
            inform (loc, "at offset %s into source object %qE of size %s",
                    offstr, ref, sizestr);
          else
            inform (loc, "source object %qE of size %s", ref, sizestr);

          return;
        }

      if (*offstr)
        inform (loc,
                "at offset %s into source object of size %s allocated by %qE",
                offstr, sizestr, allocfn);
      else
        inform (loc, "source object of size %s allocated by %qE",
                sizestr, allocfn);
      return;
    }

  if (allocfn == NULL_TREE)
    {
      if (*offstr)
        inform (loc, "at offset %s into object %qE of size %s",
                offstr, ref, sizestr);
      else
        inform (loc, "object %qE of size %s", ref, sizestr);

      return;
    }

  if (*offstr)
    inform (loc,
            "at offset %s into object of size %s allocated by %qE",
            offstr, sizestr, allocfn);
  else
    inform (loc, "object of size %s allocated by %qE",
            sizestr, allocfn);
}
/* Dump *THIS to FILE.  */

void
access_ref::dump (FILE *file) const
{
  for (int i = deref; i < 0; ++i)
    fputc ('&', file);

  for (int i = 0; i < deref; ++i)
    fputc ('*', file);

  if (gphi *phi_stmt = phi ())
    {
      fputs ("PHI <", file);
      unsigned nargs = gimple_phi_num_args (phi_stmt);
      for (unsigned i = 0; i != nargs; ++i)
        {
          tree arg = gimple_phi_arg_def (phi_stmt, i);
          print_generic_expr (file, arg);
          if (i + 1 < nargs)
            fputs (", ", file);
        }
      fputc ('>', file);
    }
  else
    print_generic_expr (file, ref);

  if (offrng[0] != offrng[1])
    fprintf (file, " + [%lli, %lli]",
             (long long) offrng[0].to_shwi (),
             (long long) offrng[1].to_shwi ());
  else if (offrng[0] != 0)
    fprintf (file, " %c %lli",
             offrng[0] < 0 ? '-' : '+',
             (long long) offrng[0].to_shwi ());

  if (base0)
    fputs (" (base0)", file);

  fputs ("; size: ", file);
  if (sizrng[0] != sizrng[1])
    {
      offset_int maxsize = wi::to_offset (max_object_size ());
      if (sizrng[0] == 0 && sizrng[1] >= maxsize)
        fputs ("unknown", file);
      else
        fprintf (file, "[%llu, %llu]",
                 (unsigned long long) sizrng[0].to_uhwi (),
                 (unsigned long long) sizrng[1].to_uhwi ());
    }
  else if (sizrng[0] != 0)
    fprintf (file, "%llu",
             (unsigned long long) sizrng[0].to_uhwi ());
  else
    fputs ("unknown", file);

  fputc ('\n', file);
}
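/* A debugging sketch: the state of an access_ref can be printed from a
   debugger or a pass with, e.g.,

     aref.dump (stderr);

   which prints the reference, its offset range, and its size range.  */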
/* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
   when MINWRITE or MINREAD, respectively, is set.  */
access_data::access_data (range_query *query, gimple *stmt, access_mode mode,
                          tree maxwrite /* = NULL_TREE */,
                          bool minwrite /* = false */,
                          tree maxread /* = NULL_TREE */,
                          bool minread /* = false */)
  : stmt (stmt), call (), dst (), src (), mode (mode), ostype ()
{
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}

/* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
   when MINWRITE or MINREAD, respectively, is set.  */
access_data::access_data (range_query *query, tree expr, access_mode mode,
                          tree maxwrite /* = NULL_TREE */,
                          bool minwrite /* = false */,
                          tree maxread /* = NULL_TREE */,
                          bool minread /* = false */)
  : stmt (), call (expr), dst (), src (), mode (mode), ostype ()
{
  set_bound (dst_bndrng, maxwrite, minwrite, query, stmt);
  set_bound (src_bndrng, maxread, minread, query, stmt);
}

/* Set BNDRNG to the range of BOUND for the statement STMT.  */

void
access_data::set_bound (offset_int bndrng[2], tree bound, bool minaccess,
                        range_query *query, gimple *stmt)
{
  /* Set the default bounds of the access and adjust below.  */
  bndrng[0] = minaccess ? 1 : 0;
  bndrng[1] = HOST_WIDE_INT_M1U;

  /* When BOUND is nonnull and a range can be extracted from it,
     set the bounds of the access to reflect both it and MINACCESS.
     BNDRNG[0] is the size of the minimum access.  */
  tree rng[2];
  if (bound && get_size_range (query, bound, stmt, rng, SR_ALLOW_ZERO))
    {
      bndrng[0] = wi::to_offset (rng[0]);
      bndrng[1] = wi::to_offset (rng[1]);
      bndrng[0] = bndrng[0] > 0 && minaccess ? 1 : 0;
    }
}
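/* A minimal construction sketch (STMT, RVALS, and N are hypothetical and
   stand for a call like memcpy (d, s, n), a range_query, and the size
   argument): record a read/write access bounded above by N and known to
   touch at least one byte on each side:

     access_data data (rvals, stmt, access_read_write,
                       /* maxwrite = */ n, /* minwrite = */ true,
                       /* maxread = */ n, /* minread = */ true);
*/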
/* Set a bit for the PHI in VISITED and return true if it wasn't
   already set.  */

bool
ssa_name_limit_t::visit_phi (tree ssa_name)
{
  if (!visited)
    visited = BITMAP_ALLOC (NULL);

  /* Return false if SSA_NAME has already been visited.  */
  return bitmap_set_bit (visited, SSA_NAME_VERSION (ssa_name));
}

/* Clear a bit for the PHI in VISITED.  */

void
ssa_name_limit_t::leave_phi (tree ssa_name)
{
  /* Return false if SSA_NAME has already been visited.  */
  bitmap_clear_bit (visited, SSA_NAME_VERSION (ssa_name));
}

/* Return false if the SSA_NAME chain length counter has reached
   the limit, otherwise increment the counter and return true.  */

bool
ssa_name_limit_t::next ()
{
  /* Return a negative value to let caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return false;

  --ssa_def_max;
  return true;
}

/* If the SSA_NAME has already been "seen" return a positive value.
   Otherwise add it to VISITED.  If the SSA_NAME limit has been
   reached, return a negative value.  Otherwise return zero.  */

int
ssa_name_limit_t::next_phi (tree ssa_name)
{
  {
    gimple *def_stmt = SSA_NAME_DEF_STMT (ssa_name);
    /* Return a positive value if the PHI has already been visited.  */
    if (gimple_code (def_stmt) == GIMPLE_PHI
        && !visit_phi (ssa_name))
      return 1;
  }

  /* Return a negative value to let caller avoid recursing beyond
     the specified limit.  */
  if (ssa_def_max == 0)
    return -1;

  --ssa_def_max;

  return 0;
}

ssa_name_limit_t::~ssa_name_limit_t ()
{
  if (visited)
    BITMAP_FREE (visited);
}
/* Default ctor.  Initialize object with pointers to the range_query
   instance to use or null.  */

pointer_query::pointer_query (range_query *qry /* = NULL */)
  : rvals (qry), hits (), misses (), failures (), depth (), max_depth (),
    var_cache ()
{
}

/* Return a pointer to the cached access_ref instance for the SSA_NAME
   PTR if it's there or null otherwise.  */

const access_ref *
pointer_query::get_ref (tree ptr, int ostype /* = 1 */) const
{
  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);
  if (var_cache.indices.length () <= idx)
    {
      ++misses;
      return NULL;
    }

  unsigned cache_idx = var_cache.indices[idx];
  if (var_cache.access_refs.length () <= cache_idx)
    {
      ++misses;
      return NULL;
    }

  const access_ref &cache_ref = var_cache.access_refs[cache_idx];
  if (cache_ref.ref)
    {
      ++hits;
      return &cache_ref;
    }

  ++misses;
  return NULL;
}
/* Retrieve the access_ref instance for a variable from the cache if it's
   there or compute it and insert it into the cache if it's nonnull.  */

bool
pointer_query::get_ref (tree ptr, gimple *stmt, access_ref *pref,
                        int ostype /* = 1 */)
{
  const unsigned version
    = TREE_CODE (ptr) == SSA_NAME ? SSA_NAME_VERSION (ptr) : 0;

  if (version)
    {
      unsigned idx = version << 1 | (ostype & 1);
      if (idx < var_cache.indices.length ())
        {
          unsigned cache_idx = var_cache.indices[idx] - 1;
          if (cache_idx < var_cache.access_refs.length ()
              && var_cache.access_refs[cache_idx].ref)
            {
              ++hits;
              *pref = var_cache.access_refs[cache_idx];
              return true;
            }
        }

      ++misses;
    }

  if (!compute_objsize (ptr, stmt, ostype, pref, this))
    {
      ++failures;
      return false;
    }

  return true;
}
/* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
   not already there.  */

void
pointer_query::put_ref (tree ptr, const access_ref &ref, int ostype /* = 1 */)
{
  /* Only add populated/valid entries.  */
  if (!ref.ref || ref.sizrng[0] < 0)
    return;

  /* Add REF to the two-level cache.  */
  unsigned version = SSA_NAME_VERSION (ptr);
  unsigned idx = version << 1 | (ostype & 1);

  /* Grow INDICES if necessary.  An index is valid if it's nonzero.
     Its value minus one is the index into ACCESS_REFS.  Not all
     entries are valid.  */
  if (var_cache.indices.length () <= idx)
    var_cache.indices.safe_grow_cleared (idx + 1);

  if (!var_cache.indices[idx])
    var_cache.indices[idx] = var_cache.access_refs.length () + 1;

  /* Grow ACCESS_REF cache if necessary.  An entry is valid if its
     REF member is nonnull.  All entries except for the last two
     are valid.  Once nonnull, the REF value must stay unchanged.  */
  unsigned cache_idx = var_cache.indices[idx];
  if (var_cache.access_refs.length () <= cache_idx)
    var_cache.access_refs.safe_grow_cleared (cache_idx + 1);

  access_ref &cache_ref = var_cache.access_refs[cache_idx];
  if (cache_ref.ref)
    {
      gcc_checking_assert (cache_ref.ref == ref.ref);
      return;
    }

  cache_ref = ref;
}
/* Flush the cache if it's nonnull.  */

void
pointer_query::flush_cache ()
{
  var_cache.indices.release ();
  var_cache.access_refs.release ();
}
/* Dump statistics and, optionally, cache contents to DUMP_FILE.  */

void
pointer_query::dump (FILE *dump_file, bool contents /* = false */)
{
  unsigned nused = 0, nrefs = 0;
  unsigned nidxs = var_cache.indices.length ();
  for (unsigned i = 0; i != nidxs; ++i)
    {
      unsigned ari = var_cache.indices[i];
      if (!ari)
        continue;

      ++nused;

      const access_ref &aref = var_cache.access_refs[ari];
      if (!aref.ref)
        continue;

      ++nrefs;
    }

  fprintf (dump_file, "pointer_query counters:\n"
           "  index cache size: %u\n"
           "  index entries: %u\n"
           "  access cache size: %u\n"
           "  access entries: %u\n"
           "  hits: %u\n"
           "  misses: %u\n"
           "  failures: %u\n"
           "  max depth: %u\n",
           nidxs, nused,
           var_cache.access_refs.length (), nrefs,
           hits, misses, failures, max_depth);

  if (!contents || !nidxs)
    return;

  fputs ("\npointer_query cache contents:\n", dump_file);

  for (unsigned i = 0; i != nidxs; ++i)
    {
      unsigned ari = var_cache.indices[i];
      if (!ari)
        continue;

      const access_ref &aref = var_cache.access_refs[ari];
      if (!aref.ref)
        continue;

      /* The level-1 cache index corresponds to the SSA_NAME_VERSION
         shifted left by one and ORed with the Object Size Type in
         the lowest bit.  Print the two separately.  */
      unsigned ver = i >> 1;
      unsigned ost = i & 1;

      fprintf (dump_file, "  %u.%u[%u]: ", ver, ost, ari);
      if (tree name = ssa_name (ver))
        {
          print_generic_expr (dump_file, name);
          fputs (" = ", dump_file);
        }
      else
        fprintf (dump_file, "  _%u = ", ver);

      aref.dump (dump_file);
    }

  fputc ('\n', dump_file);
}
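/* A minimal workflow sketch for the cache (RANGER, PTR, STMT, and OSTYPE
   are hypothetical): construct one pointer_query per pass invocation,
   reuse it across compute_objsize () calls so SSA_NAME results are
   cached, and optionally dump statistics:

     pointer_query ptr_qry (ranger);
     access_ref aref;
     if (compute_objsize (ptr, stmt, ostype, &aref, &ptr_qry))
       {
         // AREF describes the object PTR points into.
       }
     if (dump_file)
       ptr_qry.dump (dump_file, /* contents = */ true);
*/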
/* A helper of compute_objsize_r() to determine the size from an assignment
   statement STMT with the RHS of either MIN_EXPR or MAX_EXPR.  On success
   set PREF->REF to the operand with more or less space remaining,
   respectively, if both refer to the same (sub)object, or to PTR if they
   might not, and return true.  Otherwise, if the identity of neither
   operand can be determined, return false.  */

static bool
handle_min_max_size (tree ptr, int ostype, access_ref *pref,
                     ssa_name_limit_t &snlim, pointer_query *qry)
{
  gimple *stmt = SSA_NAME_DEF_STMT (ptr);
  const tree_code code = gimple_assign_rhs_code (stmt);

  /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
     Determine the size/offset of each and use the one with more or less
     space remaining, respectively.  If either fails, use the information
     determined from the other instead, adjusted up or down as appropriate
     for the expression.  */
  access_ref aref[2] = { *pref, *pref };
  tree arg1 = gimple_assign_rhs1 (stmt);
  if (!compute_objsize_r (arg1, stmt, false, ostype, &aref[0], snlim, qry))
    {
      aref[0].base0 = false;
      aref[0].offrng[0] = aref[0].offrng[1] = 0;
      aref[0].add_max_offset ();
      aref[0].set_max_size_range ();
    }

  tree arg2 = gimple_assign_rhs2 (stmt);
  if (!compute_objsize_r (arg2, stmt, false, ostype, &aref[1], snlim, qry))
    {
      aref[1].base0 = false;
      aref[1].offrng[0] = aref[1].offrng[1] = 0;
      aref[1].add_max_offset ();
      aref[1].set_max_size_range ();
    }

  if (!aref[0].ref && !aref[1].ref)
    /* Fail if the identity of neither argument could be determined.  */
    return false;

  bool i0 = false;
  if (aref[0].ref && aref[0].base0)
    {
      if (aref[1].ref && aref[1].base0)
        {
          /* If the object referenced by both arguments has been determined
             set *PREF to the one with more or less space remaining, whichever
             is appropriate for CODE.
             TODO: Indicate when the objects are distinct so it can be
             diagnosed.  */
          i0 = code == MAX_EXPR;
          const bool i1 = !i0;

          if (aref[i0].size_remaining () < aref[i1].size_remaining ())
            *pref = aref[i1];
          else
            *pref = aref[i0];

          if (aref[i0].ref != aref[i1].ref)
            /* If the operands don't refer to the same (sub)object set
               PREF->REF to the SSA_NAME from which STMT was obtained
               so that both can be identified in a diagnostic.  */
            pref->ref = ptr;

          return true;
        }

      /* If only the object referenced by one of the arguments could be
         determined, use it and...  */
      *pref = aref[0];
      i0 = true;
    }
  else
    *pref = aref[1];

  const bool i1 = !i0;
  /* ...see if the offset obtained from the other pointer can be used
     to tighten up the bound on the offset obtained from the first.  */
  if ((code == MAX_EXPR && aref[i1].offrng[1] < aref[i0].offrng[0])
      || (code == MIN_EXPR && aref[i0].offrng[0] < aref[i1].offrng[1]))
    {
      pref->offrng[0] = aref[i0].offrng[0];
      pref->offrng[1] = aref[i0].offrng[1];
    }

  /* Replace PTR->REF with the SSA_NAME to indicate the expression
     might not refer to the same (sub)object.  */
  pref->ref = ptr;
  return true;
}
/* A helper of compute_objsize_r() to determine the size of a DECL.
   Return true on success and (possibly in the future) false on failure.  */

static bool
handle_decl (tree decl, bool addr, access_ref *pref)
{
  tree decl_type = TREE_TYPE (decl);

  pref->ref = decl;

  /* Reset the offset in case it was set by a prior call and not
     cleared by the caller.  The offset is only adjusted after
     the identity of the object has been determined.  */
  pref->offrng[0] = pref->offrng[1] = 0;

  if (!addr && POINTER_TYPE_P (decl_type))
    {
      /* Set the maximum size if the reference is to the pointer
         itself (as opposed to what it points to), and clear
         BASE0 since the offset isn't necessarily zero-based.  */
      pref->set_max_size_range ();
      pref->base0 = false;
      return true;
    }

  /* Valid offsets into the object are nonnegative.  */
  pref->base0 = true;

  if (tree size = decl_init_size (decl, false))
    if (TREE_CODE (size) == INTEGER_CST)
      {
        pref->sizrng[0] = wi::to_offset (size);
        pref->sizrng[1] = pref->sizrng[0];
        return true;
      }

  pref->set_max_size_range ();
  return true;
}
/* A helper of compute_objsize_r() to determine the size from ARRAY_REF
   AREF.  ADDR is true if PTR is the operand of ADDR_EXPR.  Return true
   on success and false on failure.  */

static bool
handle_array_ref (tree aref, gimple *stmt, bool addr, int ostype,
                  access_ref *pref, ssa_name_limit_t &snlim,
                  pointer_query *qry)
{
  gcc_assert (TREE_CODE (aref) == ARRAY_REF);

  tree arefop = TREE_OPERAND (aref, 0);
  tree reftype = TREE_TYPE (arefop);
  if (!addr && TREE_CODE (TREE_TYPE (reftype)) == POINTER_TYPE)
    /* Avoid arrays of pointers.  FIXME: Handle pointers to arrays
       of known bound.  */
    return false;

  if (!compute_objsize_r (arefop, stmt, addr, ostype, pref, snlim, qry))
    return false;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (aref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, stmt, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  /* Convert the array index range determined above to a byte offset.  */
  tree lowbnd = array_ref_low_bound (aref);
  if (TREE_CODE (lowbnd) == INTEGER_CST && !integer_zerop (lowbnd))
    {
      /* Adjust the index by the low bound of the array domain (0 in C/C++,
         1 in Fortran and anything in Ada) by applying the same processing
         as in get_offset_range.  */
      const wide_int wlb = wi::to_wide (lowbnd);
      signop sgn = SIGNED;
      if (TYPE_UNSIGNED (TREE_TYPE (lowbnd))
          && wlb.get_precision () < TYPE_PRECISION (sizetype))
        sgn = UNSIGNED;
      const offset_int lb = offset_int::from (wlb, sgn);
      orng[0] -= lb;
      orng[1] -= lb;
    }

  tree eltype = TREE_TYPE (aref);
  tree tpsize = TYPE_SIZE_UNIT (eltype);
  if (!tpsize || TREE_CODE (tpsize) != INTEGER_CST)
    {
      pref->add_max_offset ();
      return true;
    }

  offset_int sz = wi::to_offset (tpsize);
  orng[0] *= sz;
  orng[1] *= sz;

  if (ostype && TREE_CODE (eltype) == ARRAY_TYPE)
    {
      /* Except for the permissive raw memory functions which use
         the size of the whole object determined above, use the size
         of the referenced array.  Because the overall offset is from
         the beginning of the complete array object add this overall
         offset to the size of array.  */
      offset_int sizrng[2] =
        {
          pref->offrng[0] + orng[0] + sz,
          pref->offrng[1] + orng[1] + sz
        };
      if (sizrng[1] < sizrng[0])
        std::swap (sizrng[0], sizrng[1]);
      if (sizrng[0] >= 0 && sizrng[0] <= pref->sizrng[0])
        pref->sizrng[0] = sizrng[0];
      if (sizrng[1] >= 0 && sizrng[1] <= pref->sizrng[1])
        pref->sizrng[1] = sizrng[1];
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
/* Given a COMPONENT_REF CREF, set *PREF size to the size of the referenced
   member.  */

static void
set_component_ref_size (tree cref, access_ref *pref)
{
  const tree base = TREE_OPERAND (cref, 0);
  const tree base_type = TREE_TYPE (base);

  /* SAM is set for array members that might need special treatment.  */
  special_array_member sam;
  tree size = component_ref_size (cref, &sam);
  if (sam == special_array_member::int_0)
    pref->sizrng[0] = pref->sizrng[1] = 0;
  else if (!pref->trail1special && sam == special_array_member::trail_1)
    pref->sizrng[0] = pref->sizrng[1] = 1;
  else if (size && TREE_CODE (size) == INTEGER_CST)
    pref->sizrng[0] = pref->sizrng[1] = wi::to_offset (size);
  else
    {
      /* When the size of the member is unknown it's either a flexible
         array member or a trailing special array member (either zero
         length or one-element).  Set the size to the maximum minus
         the constant size of the base object's type.  */
      pref->sizrng[0] = 0;
      pref->sizrng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      if (tree base_size = TYPE_SIZE_UNIT (base_type))
        if (TREE_CODE (base_size) == INTEGER_CST)
          pref->sizrng[1] -= wi::to_offset (base_size);
    }
}
/* A helper of compute_objsize_r() to determine the size from COMPONENT_REF
   CREF.  Return true on success and false on failure.  */

static bool
handle_component_ref (tree cref, gimple *stmt, bool addr, int ostype,
                      access_ref *pref, ssa_name_limit_t &snlim,
                      pointer_query *qry)
{
  gcc_assert (TREE_CODE (cref) == COMPONENT_REF);

  const tree base = TREE_OPERAND (cref, 0);
  const tree field = TREE_OPERAND (cref, 1);
  access_ref base_ref = *pref;

  /* Unconditionally determine the size of the base object (it could
     be smaller than the referenced member when the object is stored
     in a buffer with an insufficient size).  */
  if (!compute_objsize_r (base, stmt, addr, 0, &base_ref, snlim, qry))
    return false;

  /* Add the offset of the member to the offset into the object computed
     so far.  */
  tree offset = byte_position (field);
  if (TREE_CODE (offset) == INTEGER_CST)
    base_ref.add_offset (wi::to_offset (offset));
  else
    base_ref.add_max_offset ();

  if (!base_ref.ref)
    /* PREF->REF may have been already set to an SSA_NAME earlier
       to provide better context for diagnostics.  In that case,
       leave it unchanged.  */
    base_ref.ref = base;

  const tree base_type = TREE_TYPE (base);
  if (TREE_CODE (base_type) == UNION_TYPE)
    /* In accesses through union types consider the entire unions
       rather than just their members.  */
    ostype = 0;

  if (ostype == 0)
    {
      /* In OSTYPE zero (for raw memory functions like memcpy), use
         the maximum size instead if the identity of the enclosing
         object cannot be determined.  */
      *pref = base_ref;
      return true;
    }

  pref->ref = field;

  if (!addr && POINTER_TYPE_P (TREE_TYPE (field)))
    {
      /* Set maximum size if the reference is to the pointer member
         itself (as opposed to what it points to).  */
      pref->set_max_size_range ();
      return true;
    }

  set_component_ref_size (cref, pref);

  if (base_ref.size_remaining () < pref->size_remaining ())
    /* Use the base object if it's smaller than the member.  */
    *pref = base_ref;

  return true;
}
/* A helper of compute_objsize_r() to determine the size from MEM_REF
   MREF.  Return true on success and false on failure.  */

static bool
handle_mem_ref (tree mref, gimple *stmt, int ostype, access_ref *pref,
                ssa_name_limit_t &snlim, pointer_query *qry)
{
  gcc_assert (TREE_CODE (mref) == MEM_REF);

  tree mreftype = TYPE_MAIN_VARIANT (TREE_TYPE (mref));
  if (VECTOR_TYPE_P (mreftype))
    {
      /* Hack: Handle MEM_REFs of vector types as those to complete
         objects; those may be synthesized from multiple assignments
         to consecutive data members (see PR 93200 and 96963).
         FIXME: Vectorized assignments should only be present after
         vectorization so this hack is only necessary after it has
         run and could be avoided in calls from prior passes (e.g.,
         tree-ssa-strlen.cc).
         FIXME: Deal with this more generally, e.g., by marking up
         such MEM_REFs at the time they're created.  */
      ostype = 0;
    }

  tree mrefop = TREE_OPERAND (mref, 0);
  if (!compute_objsize_r (mrefop, stmt, false, ostype, pref, snlim, qry))
    return false;

  offset_int orng[2];
  tree off = pref->eval (TREE_OPERAND (mref, 1));
  range_query *const rvals = qry ? qry->rvals : NULL;
  if (!get_offset_range (off, stmt, orng, rvals))
    {
      /* Set ORNG to the maximum offset representable in ptrdiff_t.  */
      orng[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node));
      orng[0] = -orng[1] - 1;
    }

  pref->add_offset (orng[0], orng[1]);
  return true;
}
2006 /* A helper of compute_objsize_r() to determine the size from SSA_NAME
2007 PTR. Return true on success and false on failure. */
2010 handle_ssa_name (tree ptr
, bool addr
, int ostype
,
2011 access_ref
*pref
, ssa_name_limit_t
&snlim
,
2017 /* Only process an SSA_NAME if the recursion limit has not yet
2021 if (++qry
->depth
> qry
->max_depth
)
2022 qry
->max_depth
= qry
->depth
;
2023 if (const access_ref
*cache_ref
= qry
->get_ref (ptr
, ostype
))
2025 /* Add the number of DEREFerences accummulated so far. */
2026 const int deref
= pref
->deref
;
2028 pref
->deref
+= deref
;
2033 gimple
*stmt
= SSA_NAME_DEF_STMT (ptr
);
2034 if (is_gimple_call (stmt
))
2036 /* If STMT is a call to an allocation function get the size
2037 from its argument(s). If successful, also set *PREF->REF
2038 to PTR for the caller to include in diagnostics. */
2040 range_query
*const rvals
= qry
? qry
->rvals
: NULL
;
2041 if (gimple_call_alloc_size (stmt
, wr
, rvals
))
2044 pref
->sizrng
[0] = offset_int::from (wr
[0], UNSIGNED
);
2045 pref
->sizrng
[1] = offset_int::from (wr
[1], UNSIGNED
);
2046 /* Constrain both bounds to a valid size. */
2047 offset_int maxsize
= wi::to_offset (max_object_size ());
2048 if (pref
->sizrng
[0] > maxsize
)
2049 pref
->sizrng
[0] = maxsize
;
2050 if (pref
->sizrng
[1] > maxsize
)
2051 pref
->sizrng
[1] = maxsize
;
2055 /* For functions known to return one of their pointer arguments
2056 try to determine what the returned pointer points to, and on
2057 success add OFFRNG which was set to the offset added by
2058 the function (e.g., memchr) to the overall offset. */
2060 offset_int offrng
[2];
2061 if (tree ret
= gimple_call_return_array (stmt
, offrng
, &past_end
,
2064 if (!compute_objsize_r (ret
, stmt
, addr
, ostype
, pref
, snlim
, qry
))
2067 /* Cap OFFRNG[1] to at most the remaining size of
2069 offset_int remrng
[2];
2070 remrng
[1] = pref
->size_remaining (remrng
);
2071 if (remrng
[1] != 0 && !past_end
)
2072 /* Decrement the size for functions that never return
2073 a past-the-end pointer. */
2076 if (remrng
[1] < offrng
[1])
2077 offrng
[1] = remrng
[1];
2078 pref
->add_offset (offrng
[0], offrng
[1]);
2082 /* For other calls that might return arbitrary pointers
2083 including into the middle of objects set the size
2084 range to maximum, clear PREF->BASE0, and also set
2085 PREF->REF to include in diagnostics. */
2086 pref
->set_max_size_range ();
2087 pref
->base0
= false;
2091 qry
->put_ref (ptr
, *pref
, ostype
);
2095 if (gimple_nop_p (stmt
))
2097 /* For a function argument try to determine the byte size
2098 of the array from the current function declaratation
2099 (e.g., attribute access or related). */
2101 bool static_array
= false;
2102 if (tree ref
= gimple_parm_array_size (ptr
, wr
, &static_array
))
2104 pref
->parmarray
= !static_array
;
2105 pref
->sizrng
[0] = offset_int::from (wr
[0], UNSIGNED
);
2106 pref
->sizrng
[1] = offset_int::from (wr
[1], UNSIGNED
);
2108 qry
->put_ref (ptr
, *pref
, ostype
);
2112 pref
->set_max_size_range ();
2113 pref
->base0
= false;
2115 qry
->put_ref (ptr
, *pref
, ostype
);
2119 if (gimple_code (stmt
) == GIMPLE_PHI
)
2121 /* Pass PTR to get_ref() via PREF. If all PHI arguments refer
2122 to the same object the function will replace it with it. */
2124 access_ref phi_ref
= *pref
;
2125 if (!pref
->get_ref (NULL
, &phi_ref
, ostype
, &snlim
, qry
))
2128 qry
->put_ref (ptr
, *pref
, ostype
);
2132 if (!is_gimple_assign (stmt
))
2134 /* Clear BASE0 since the assigned pointer might point into
2135 the middle of the object, set the maximum size range and,
2136 if the SSA_NAME refers to a function argumnent, set
2138 pref
->base0
= false;
2139 pref
->set_max_size_range ();
2144 tree_code code
= gimple_assign_rhs_code (stmt
);
2146 if (code
== MAX_EXPR
|| code
== MIN_EXPR
)
2148 if (!handle_min_max_size (ptr
, ostype
, pref
, snlim
, qry
))
2151 qry
->put_ref (ptr
, *pref
, ostype
);
  tree rhs = gimple_assign_rhs1 (stmt);

  if (code == POINTER_PLUS_EXPR
      && TREE_CODE (TREE_TYPE (rhs)) == POINTER_TYPE)
    {
      /* Compute the size of the object first.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;

      offset_int orng[2];
      tree off = gimple_assign_rhs2 (stmt);
      range_query *const rvals = qry ? qry->rvals : NULL;
      if (get_offset_range (off, stmt, orng, rvals))
	pref->add_offset (orng[0], orng[1]);
      else
	pref->add_max_offset ();

      qry->put_ref (ptr, *pref, ostype);
      return true;
    }
  if (code == ADDR_EXPR || code == SSA_NAME)
    {
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;

      qry->put_ref (ptr, *pref, ostype);
      return true;
    }
  if (ostype > 1 && POINTER_TYPE_P (TREE_TYPE (rhs)))
    {
      /* When determining the qualifiers follow the pointer but
	 avoid caching the result.  As the pointer is added to
	 and/or dereferenced the computed size and offset need
	 not be meaningful for other queries involving the same
	 pointer.  */
      if (!compute_objsize_r (rhs, stmt, addr, ostype, pref, snlim, qry))
	return false;

      rhs = pref->ref;
    }

  /* (This could also be an assignment from a nonlocal pointer.)  Save
     PTR to mention in diagnostics but otherwise treat it as a pointer
     to an unknown object.  */
  pref->ref = rhs;

  pref->base0 = false;
  pref->set_max_size_range ();
  return true;
}
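/* As an illustration of the POINTER_PLUS_EXPR handling above (a sketch
   of the intent, not an excerpt of real GIMPLE): given

     char buf[8];
     p_2 = &buf;
     q_3 = p_2 + 4;

   computing the size for q_3 first computes it for p_2 (SIZRNG [8, 8],
   OFFRNG [0, 0]) and then adds the constant offset range [4, 4], so
   a subsequent access_ref::size_remaining () on the result yields 4.  */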
/* Helper to compute the size of the object referenced by the PTR
   expression which must have pointer type, using Object Size type
   OSTYPE (only the least significant 2 bits are used).
   On success, sets PREF->REF to the DECL of the referenced object
   if it's unique, otherwise to null, PREF->OFFRNG to the range of
   offsets into it, and PREF->SIZRNG to the range of sizes of
   the object(s).
   ADDR is true for an enclosing ADDR_EXPR.
   SNLIM is used to avoid visiting the same PHI operand multiple
   times, and, when nonnull, RVALS to determine range information.
   Returns true on success, false when a meaningful size (or range)
   cannot be determined.

   The function is intended for diagnostics and should not be used
   to influence code generation or optimization.  */
static bool
compute_objsize_r (tree ptr, gimple *stmt, bool addr, int ostype,
		   access_ref *pref, ssa_name_limit_t &snlim,
		   pointer_query *qry)
{
  STRIP_NOPS (ptr);

  if (DECL_P (ptr))
    return handle_decl (ptr, addr, pref);

  switch (TREE_CODE (ptr))
    {
    case ADDR_EXPR:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, true, ostype, pref, snlim, qry))
	  return false;

	--pref->deref;
	return true;
      }

    case BIT_FIELD_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	offset_int off = wi::to_offset (pref->eval (TREE_OPERAND (ptr, 2)));
	pref->add_offset (off / BITS_PER_UNIT);
	return true;
      }

    case ARRAY_REF:
      return handle_array_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case COMPONENT_REF:
      return handle_component_ref (ptr, stmt, addr, ostype, pref, snlim, qry);

    case MEM_REF:
      return handle_mem_ref (ptr, stmt, ostype, pref, snlim, qry);

    case TARGET_MEM_REF:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* TODO: Handle remaining operands.  Until then, add maximum offset.  */
	pref->add_max_offset ();
	return true;
      }

    case INTEGER_CST:
      /* Pointer constants other than null smaller than param_min_pagesize
	 might be the result of erroneous null pointer addition/subtraction.
	 Unless zero is a valid address, set the size to zero.  For null
	 pointers, set the size to the maximum for now since those may be
	 the result of jump threading.  Similarly for values
	 >= param_min_pagesize, in order to support (type *) 0x7cdeab00.  */
      if (integer_zerop (ptr)
	  || wi::to_widest (ptr) >= param_min_pagesize)
	pref->set_max_size_range ();
      else if (POINTER_TYPE_P (TREE_TYPE (ptr)))
	{
	  tree deref_type = TREE_TYPE (TREE_TYPE (ptr));
	  addr_space_t as = TYPE_ADDR_SPACE (deref_type);
	  if (targetm.addr_space.zero_address_valid (as))
	    pref->set_max_size_range ();
	  else
	    {
	      pref->sizrng[0] = pref->sizrng[1] = 0;
	      pref->ref_nullptr_p = true;
	    }
	}
      else
	pref->sizrng[0] = pref->sizrng[1] = 0;

      return true;

    case STRING_CST:
      pref->sizrng[0] = pref->sizrng[1] = TREE_STRING_LENGTH (ptr);
      return true;

    case POINTER_PLUS_EXPR:
      {
	tree ref = TREE_OPERAND (ptr, 0);
	if (!compute_objsize_r (ref, stmt, addr, ostype, pref, snlim, qry))
	  return false;

	/* The below only makes sense if the offset is being applied to
	   the address of the object.  */
	if (pref->deref != -1)
	  return true;

	offset_int orng[2];
	tree off = pref->eval (TREE_OPERAND (ptr, 1));
	if (get_offset_range (off, stmt, orng, qry->rvals))
	  pref->add_offset (orng[0], orng[1]);
	else
	  pref->add_max_offset ();
	return true;
      }

    case VIEW_CONVERT_EXPR:
      ptr = TREE_OPERAND (ptr, 0);
      return compute_objsize_r (ptr, stmt, addr, ostype, pref, snlim, qry);

    case SSA_NAME:
      return handle_ssa_name (ptr, addr, ostype, pref, snlim, qry);

    default:
      break;
    }

  /* Assume all other expressions point into an unknown object
     of the maximum valid size.  */
  pref->base0 = false;
  pref->set_max_size_range ();
  if (TREE_CODE (ptr) == SSA_NAME)
    qry->put_ref (ptr, *pref);
  return true;
}
/* A "public" wrapper around the above.  Clients should use this overload
   instead.  */

tree
compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
		 pointer_query *ptr_qry)
{
  pointer_query qry;
  if (!ptr_qry)
    ptr_qry = &qry;

  /* Clear and invalidate in case *PREF is being reused.  */
  pref->offrng[0] = pref->offrng[1] = 0;
  pref->sizrng[0] = pref->sizrng[1] = -1;

  ssa_name_limit_t snlim;
  if (!compute_objsize_r (ptr, stmt, false, ostype, pref, snlim, ptr_qry))
    return NULL_TREE;

  offset_int maxsize = pref->size_remaining ();
  if (pref->base0 && pref->offrng[0] < 0 && pref->offrng[1] >= 0)
    pref->offrng[0] = 0;
  return wide_int_to_tree (sizetype, maxsize);
}
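/* A minimal usage sketch (a hypothetical caller, not an excerpt of
   existing GCC code): a client interested in how many bytes remain
   in the object PTR points to, reusing a pointer_query cache across
   queries, might do something like

     pointer_query ptr_qry;
     access_ref aref;
     if (tree size = compute_objsize (ptr, stmt, 1, &aref, &ptr_qry))
       {
	 offset_int rem = aref.size_remaining ();
	 ...
       }

   where OSTYPE has the same meaning as the second argument to
   __builtin_object_size.  */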
/* Transitional wrapper.  The function should be removed once callers
   transition to the pointer_query API.  */

tree
compute_objsize (tree ptr, gimple *stmt, int ostype, access_ref *pref,
		 range_query *rvals /* = NULL */)
{
  pointer_query qry;
  qry.rvals = rvals;
  return compute_objsize (ptr, stmt, ostype, pref, &qry);
}
/* Legacy wrapper around the above.  The function should be removed
   once callers transition to one of the two above.  */

tree
compute_objsize (tree ptr, gimple *stmt, int ostype, tree *pdecl /* = NULL */,
		 tree *poff /* = NULL */, range_query *rvals /* = NULL */)
{
  /* Set the initial offsets to zero and size to negative to indicate
     none has been computed yet.  */
  access_ref ref;
  tree size = compute_objsize (ptr, stmt, ostype, &ref, rvals);
  if (!size || !ref.base0)
    return NULL_TREE;

  if (pdecl)
    *pdecl = ref.ref;

  if (poff)
    *poff = wide_int_to_tree (ptrdiff_type_node, ref.offrng[ref.offrng[0] < 0]);

  return size;
}
/* Determine the struct member of TYPE (possibly recursively) into which
   the byte offset OFF points, starting after the field START_AFTER if
   it's nonnull.  On success return the member's FIELD_DECL and, if
   nonnull, set *FLDOFF to the offset of its first byte.  If nonnull,
   set *NEXTOFF to the offset of the next field (which reflects any
   padding between the returned field and the next).  Otherwise, if no
   such member can be found, return null.  */
tree
field_at_offset (tree type, tree start_after, HOST_WIDE_INT off,
		 HOST_WIDE_INT *fldoff /* = nullptr */,
		 HOST_WIDE_INT *nextoff /* = nullptr */)
{
  tree first_fld = TYPE_FIELDS (type);

  HOST_WIDE_INT offbuf = 0, nextbuf = 0;
  if (!fldoff)
    fldoff = &offbuf;
  if (!nextoff)
    nextoff = &nextbuf;

  /* The field to return.  */
  tree last_fld = NULL_TREE;
  /* The next field to advance to.  */
  tree next_fld = NULL_TREE;

  /* NEXT_FLD's cached offset.  */
  HOST_WIDE_INT next_pos = -1;

  for (tree fld = first_fld; fld; fld = next_fld)
    {
      next_fld = fld;
      do
	/* Advance to the next relevant data member.  */
	next_fld = TREE_CHAIN (next_fld);
      while (next_fld
	     && (TREE_CODE (next_fld) != FIELD_DECL
		 || DECL_ARTIFICIAL (next_fld)));

      if (TREE_CODE (fld) != FIELD_DECL || DECL_ARTIFICIAL (fld))
	continue;

      if (fld == start_after)
	continue;

      tree fldtype = TREE_TYPE (fld);
      /* The offset of FLD within its immediately enclosing structure.  */
      HOST_WIDE_INT fldpos = next_pos < 0 ? int_byte_position (fld) : next_pos;

      tree typesize = TYPE_SIZE_UNIT (fldtype);
      if (typesize && TREE_CODE (typesize) != INTEGER_CST)
	/* Bail if FLD is a variable length member.  */
	return NULL_TREE;

      /* If the size is not available the field is a flexible array
	 member.  Treat this case as success.  */
      HOST_WIDE_INT fldsize = (tree_fits_uhwi_p (typesize)
			       ? tree_to_uhwi (typesize)
			       : off);

      /* If OFF is beyond the end of the current field continue.  */
      HOST_WIDE_INT fldend = fldpos + fldsize;
      if (fldend < off)
	continue;

      if (next_fld)
	{
	  /* If OFF is equal to the offset of the next field continue
	     to it and skip the array/struct business below.  */
	  tree pos = byte_position (next_fld);
	  if (!tree_fits_shwi_p (pos))
	    /* Bail if NEXT_FLD is a variable length member.  */
	    return NULL_TREE;
	  next_pos = tree_to_shwi (pos);
	  *nextoff = *fldoff + next_pos;
	  if (*nextoff == off && TREE_CODE (type) != UNION_TYPE)
	    continue;
	}
      else
	*nextoff = HOST_WIDE_INT_MAX;

      /* OFF refers somewhere into the current field or just past its end,
	 which could mean it refers to the next field.  */
      if (TREE_CODE (fldtype) == ARRAY_TYPE)
	{
	  /* Will be set to the offset of the first byte of the array
	     element (which may be an array) of FLDTYPE into which
	     OFF - FLDPOS points (which may be past ELTOFF).  */
	  HOST_WIDE_INT eltoff = 0;
	  if (tree ft = array_elt_at_offset (fldtype, off - fldpos, &eltoff))
	    fldtype = ft;
	  else
	    continue;

	  /* Advance the position to include the array element above.
	     If OFF - FLDPOS refers to a member of FLDTYPE, the member
	     will be determined below.  */
	  fldpos += eltoff;
	}

      *fldoff += fldpos;

      if (TREE_CODE (fldtype) == RECORD_TYPE)
	/* Drill down into the current field if it's a struct.  */
	fld = field_at_offset (fldtype, start_after, off - fldpos,
			       fldoff, nextoff);

      last_fld = fld;

      /* Unless the offset is just past the end of the field return it.
	 Otherwise save it and return it only if the offset of the next
	 field is greater (i.e., there is padding between the two), or
	 if there is no next field.  */
      if (off < fldend)
	break;
    }

  if (*nextoff == HOST_WIDE_INT_MAX && next_fld)
    *nextoff = next_pos;

  return last_fld;
}
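/* For example (a sketch of the intended behavior, assuming a typical
   target with 4-byte int, 2-byte short, and natural alignment), given

     struct A { int i; struct B { char c; short s; } b; };

   field_at_offset (A, NULL_TREE, 6, &fldoff) drills down into member B
   and returns the FIELD_DECL for S, setting *FLDOFF to 6 (4 for B plus
   2 for S within it).  */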
/* Determine the offset *ELTOFF of the first byte of the array element
   of array ARTYPE into which the byte offset OFF points.  On success
   set *ELTOFF to the offset of the first byte and return the element
   type.  Otherwise, if no such element can be found, return null.  */
tree
array_elt_at_offset (tree artype, HOST_WIDE_INT off,
		     HOST_WIDE_INT *eltoff /* = nullptr */,
		     HOST_WIDE_INT *subar_size /* = nullptr */)
{
  gcc_assert (TREE_CODE (artype) == ARRAY_TYPE);

  HOST_WIDE_INT dummy;
  if (!eltoff)
    eltoff = &dummy;
  if (!subar_size)
    subar_size = &dummy;

  tree eltype = artype;
  while (TREE_CODE (TREE_TYPE (eltype)) == ARRAY_TYPE)
    eltype = TREE_TYPE (eltype);

  tree subartype = eltype;
  if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype))
      || TYPE_MODE (TREE_TYPE (eltype)) != TYPE_MODE (char_type_node))
    eltype = TREE_TYPE (eltype);

  *subar_size = int_size_in_bytes (subartype);

  if (eltype == artype)
    {
      *eltoff = 0;
      return artype;
    }

  HOST_WIDE_INT artype_size = int_size_in_bytes (artype);
  HOST_WIDE_INT eltype_size = int_size_in_bytes (eltype);

  if (off < artype_size)
    {
      *eltoff = (off / eltype_size) * eltype_size;
      return TREE_CODE (eltype) == ARRAY_TYPE ? TREE_TYPE (eltype) : eltype;
    }

  return NULL_TREE;
}
/* Wrapper around build_array_type_nelts that makes sure the array
   can be created at all and handles zero-sized arrays specially.  */

tree
build_printable_array_type (tree eltype, unsigned HOST_WIDE_INT nelts)
{
  if (TYPE_SIZE_UNIT (eltype)
      && TREE_CODE (TYPE_SIZE_UNIT (eltype)) == INTEGER_CST
      && !integer_zerop (TYPE_SIZE_UNIT (eltype))
      && TYPE_ALIGN_UNIT (eltype) > 1
      && wi::zext (wi::to_wide (TYPE_SIZE_UNIT (eltype)),
		   ffs_hwi (TYPE_ALIGN_UNIT (eltype)) - 1) != 0)
    eltype = TYPE_MAIN_VARIANT (eltype);

  /* Consider excessive NELTS an array of unknown bound.  */
  tree idxtype = NULL_TREE;
  if (nelts < HOST_WIDE_INT_MAX)
    {
      if (nelts)
	return build_array_type_nelts (eltype, nelts);
      idxtype = build_range_type (sizetype, size_zero_node, NULL_TREE);
    }

  tree arrtype = build_array_type (eltype, idxtype);
  arrtype = build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype));
  TYPE_SIZE (arrtype) = bitsize_zero_node;
  TYPE_SIZE_UNIT (arrtype) = size_zero_node;