/* Definitions of the pointer_query and related classes.

   Copyright (C) 2020-2022 Free Software Foundation, Inc.

   This file is part of GCC.

   GCC is free software; you can redistribute it and/or modify it under
   the terms of the GNU General Public License as published by the Free
   Software Foundation; either version 3, or (at your option) any later
   version.

   GCC is distributed in the hope that it will be useful, but WITHOUT ANY
   WARRANTY; without even the implied warranty of MERCHANTABILITY or
   FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
   for more details.

   You should have received a copy of the GNU General Public License
   along with GCC; see the file COPYING3.  If not see
   <http://www.gnu.org/licenses/>.  */
23 #include "coretypes.h"
27 #include "stringpool.h"
29 #include "diagnostic-core.h"
30 #include "fold-const.h"
31 #include "tree-object-size.h"
32 #include "tree-ssa-strlen.h"
33 #include "langhooks.h"
34 #include "stringpool.h"
36 #include "gimple-iterator.h"
37 #include "gimple-fold.h"
38 #include "gimple-ssa.h"
40 #include "attr-fnspec.h"
41 #include "gimple-range.h"
42 #include "pointer-query.h"
43 #include "tree-pretty-print.h"
44 #include "tree-ssanames.h"
47 static bool compute_objsize_r (tree
, gimple
*, bool, int, access_ref
*,
48 ssa_name_limit_t
&, pointer_query
*);
50 /* Wrapper around the wide_int overload of get_range that accepts
51 offset_int instead. For middle end expressions returns the same
52 result. For a subset of nonconstamt expressions emitted by the front
53 end determines a more precise range than would be possible otherwise. */
56 get_offset_range (tree x
, gimple
*stmt
, offset_int r
[2], range_query
*rvals
)
59 if (TREE_CODE (x
) == PLUS_EXPR
)
61 /* Handle constant offsets in pointer addition expressions seen
62 n the front end IL. */
63 tree op
= TREE_OPERAND (x
, 1);
64 if (TREE_CODE (op
) == INTEGER_CST
)
66 op
= fold_convert (signed_type_for (TREE_TYPE (op
)), op
);
67 add
= wi::to_offset (op
);
68 x
= TREE_OPERAND (x
, 0);
72 if (TREE_CODE (x
) == NOP_EXPR
)
73 /* Also handle conversions to sizetype seen in the front end IL. */
74 x
= TREE_OPERAND (x
, 0);
76 tree type
= TREE_TYPE (x
);
77 if (!INTEGRAL_TYPE_P (type
) && !POINTER_TYPE_P (type
))
80 if (TREE_CODE (x
) != INTEGER_CST
81 && TREE_CODE (x
) != SSA_NAME
)
83 if (TYPE_UNSIGNED (type
)
84 && TYPE_PRECISION (type
) == TYPE_PRECISION (sizetype
))
85 type
= signed_type_for (type
);
87 r
[0] = wi::to_offset (TYPE_MIN_VALUE (type
)) + add
;
88 r
[1] = wi::to_offset (TYPE_MAX_VALUE (type
)) + add
;
93 if (!get_range (x
, stmt
, wr
, rvals
))
97 /* Only convert signed integers or unsigned sizetype to a signed
98 offset and avoid converting large positive values in narrower
99 types to negative offsets. */
100 if (TYPE_UNSIGNED (type
)
101 && wr
[0].get_precision () < TYPE_PRECISION (sizetype
))
104 r
[0] = offset_int::from (wr
[0], sgn
);
105 r
[1] = offset_int::from (wr
[1], sgn
);
109 /* Return the argument that the call STMT to a built-in function returns
110 or null if it doesn't. On success, set OFFRNG[] to the range of offsets
111 from the argument reflected in the value returned by the built-in if it
112 can be determined, otherwise to 0 and HWI_M1U respectively. Set
113 *PAST_END for functions like mempcpy that might return a past the end
114 pointer (most functions return a dereferenceable pointer to an existing
115 element of an array). */
118 gimple_call_return_array (gimple
*stmt
, offset_int offrng
[2], bool *past_end
,
119 ssa_name_limit_t
&snlim
, pointer_query
*qry
)
121 /* Clear and set below for the rare function(s) that might return
122 a past-the-end pointer. */
126 /* Check for attribute fn spec to see if the function returns one
128 attr_fnspec fnspec
= gimple_call_fnspec (as_a
<gcall
*>(stmt
));
130 if (fnspec
.returns_arg (&argno
))
132 /* Functions return the first argument (not a range). */
133 offrng
[0] = offrng
[1] = 0;
134 return gimple_call_arg (stmt
, argno
);
138 if (gimple_call_num_args (stmt
) < 1)
141 tree fn
= gimple_call_fndecl (stmt
);
142 if (!gimple_call_builtin_p (stmt
, BUILT_IN_NORMAL
))
144 /* See if this is a call to placement new. */
146 || !DECL_IS_OPERATOR_NEW_P (fn
)
147 || DECL_IS_REPLACEABLE_OPERATOR_NEW_P (fn
))
150 /* Check the mangling, keeping in mind that operator new takes
151 a size_t which could be unsigned int or unsigned long. */
152 tree fname
= DECL_ASSEMBLER_NAME (fn
);
153 if (!id_equal (fname
, "_ZnwjPv") // ordinary form
154 && !id_equal (fname
, "_ZnwmPv") // ordinary form
155 && !id_equal (fname
, "_ZnajPv") // array form
156 && !id_equal (fname
, "_ZnamPv")) // array form
159 if (gimple_call_num_args (stmt
) != 2)
162 /* Allocation functions return a pointer to the beginning. */
163 offrng
[0] = offrng
[1] = 0;
164 return gimple_call_arg (stmt
, 1);
167 switch (DECL_FUNCTION_CODE (fn
))
169 case BUILT_IN_MEMCPY
:
170 case BUILT_IN_MEMCPY_CHK
:
171 case BUILT_IN_MEMMOVE
:
172 case BUILT_IN_MEMMOVE_CHK
:
173 case BUILT_IN_MEMSET
:
174 case BUILT_IN_STRCAT
:
175 case BUILT_IN_STRCAT_CHK
:
176 case BUILT_IN_STRCPY
:
177 case BUILT_IN_STRCPY_CHK
:
178 case BUILT_IN_STRNCAT
:
179 case BUILT_IN_STRNCAT_CHK
:
180 case BUILT_IN_STRNCPY
:
181 case BUILT_IN_STRNCPY_CHK
:
182 /* Functions return the first argument (not a range). */
183 offrng
[0] = offrng
[1] = 0;
184 return gimple_call_arg (stmt
, 0);
186 case BUILT_IN_MEMPCPY
:
187 case BUILT_IN_MEMPCPY_CHK
:
189 /* The returned pointer is in a range constrained by the smaller
190 of the upper bound of the size argument and the source object
193 offrng
[1] = HOST_WIDE_INT_M1U
;
194 tree off
= gimple_call_arg (stmt
, 2);
195 bool off_valid
= get_offset_range (off
, stmt
, offrng
, qry
->rvals
);
196 if (!off_valid
|| offrng
[0] != offrng
[1])
198 /* If the offset is either indeterminate or in some range,
199 try to constrain its upper bound to at most the size
200 of the source object. */
202 tree src
= gimple_call_arg (stmt
, 1);
203 if (compute_objsize_r (src
, stmt
, false, 1, &aref
, snlim
, qry
)
204 && aref
.sizrng
[1] < offrng
[1])
205 offrng
[1] = aref
.sizrng
[1];
208 /* Mempcpy may return a past-the-end pointer. */
210 return gimple_call_arg (stmt
, 0);
213 case BUILT_IN_MEMCHR
:
215 tree off
= gimple_call_arg (stmt
, 2);
216 if (get_offset_range (off
, stmt
, offrng
, qry
->rvals
))
219 offrng
[1] = HOST_WIDE_INT_M1U
;
222 return gimple_call_arg (stmt
, 0);
225 case BUILT_IN_STRCHR
:
226 case BUILT_IN_STRRCHR
:
227 case BUILT_IN_STRSTR
:
229 offrng
[1] = HOST_WIDE_INT_M1U
;
230 return gimple_call_arg (stmt
, 0);
232 case BUILT_IN_STPCPY
:
233 case BUILT_IN_STPCPY_CHK
:
236 tree src
= gimple_call_arg (stmt
, 1);
237 if (compute_objsize_r (src
, stmt
, false, 1, &aref
, snlim
, qry
))
238 offrng
[1] = aref
.sizrng
[1] - 1;
240 offrng
[1] = HOST_WIDE_INT_M1U
;
243 return gimple_call_arg (stmt
, 0);
246 case BUILT_IN_STPNCPY
:
247 case BUILT_IN_STPNCPY_CHK
:
249 /* The returned pointer is in a range between the first argument
250 and it plus the smaller of the upper bound of the size argument
251 and the source object size. */
252 offrng
[1] = HOST_WIDE_INT_M1U
;
253 tree off
= gimple_call_arg (stmt
, 2);
254 if (!get_offset_range (off
, stmt
, offrng
, qry
->rvals
)
255 || offrng
[0] != offrng
[1])
257 /* If the offset is either indeterminate or in some range,
258 try to constrain its upper bound to at most the size
259 of the source object. */
261 tree src
= gimple_call_arg (stmt
, 1);
262 if (compute_objsize_r (src
, stmt
, false, 1, &aref
, snlim
, qry
)
263 && aref
.sizrng
[1] < offrng
[1])
264 offrng
[1] = aref
.sizrng
[1];
267 /* When the source is the empty string the returned pointer is
268 a copy of the argument. Otherwise stpcpy can also return
269 a past-the-end pointer. */
272 return gimple_call_arg (stmt
, 0);
282 /* Return true when EXP's range can be determined and set RANGE[] to it
283 after adjusting it if necessary to make EXP a represents a valid size
284 of object, or a valid size argument to an allocation function declared
285 with attribute alloc_size (whose argument may be signed), or to a string
286 manipulation function like memset.
287 When ALLOW_ZERO is set in FLAGS, allow returning a range of [0, 0] for
288 a size in an anti-range [1, N] where N > PTRDIFF_MAX. A zero range is
289 a (nearly) invalid argument to allocation functions like malloc but it
290 is a valid argument to functions like memset.
291 When USE_LARGEST is set in FLAGS set RANGE to the largest valid subrange
292 in a multi-range, otherwise to the smallest valid subrange. */
295 get_size_range (range_query
*query
, tree exp
, gimple
*stmt
, tree range
[2],
301 if (tree_fits_uhwi_p (exp
))
303 /* EXP is a constant. */
304 range
[0] = range
[1] = exp
;
308 tree exptype
= TREE_TYPE (exp
);
309 bool integral
= INTEGRAL_TYPE_P (exptype
);
312 enum value_range_kind range_type
;
315 query
= get_range_query (cfun
);
321 query
->range_of_expr (vr
, exp
, stmt
);
323 if (vr
.undefined_p ())
324 vr
.set_varying (TREE_TYPE (exp
));
325 range_type
= vr
.kind ();
326 min
= wi::to_wide (vr
.min ());
327 max
= wi::to_wide (vr
.max ());
330 range_type
= VR_VARYING
;
332 if (range_type
== VR_VARYING
)
336 /* Use the full range of the type of the expression when
337 no value range information is available. */
338 range
[0] = TYPE_MIN_VALUE (exptype
);
339 range
[1] = TYPE_MAX_VALUE (exptype
);
343 range
[0] = NULL_TREE
;
344 range
[1] = NULL_TREE
;
348 unsigned expprec
= TYPE_PRECISION (exptype
);
350 bool signed_p
= !TYPE_UNSIGNED (exptype
);
352 if (range_type
== VR_ANTI_RANGE
)
356 if (wi::les_p (max
, 0))
358 /* EXP is not in a strictly negative range. That means
359 it must be in some (not necessarily strictly) positive
360 range which includes zero. Since in signed to unsigned
361 conversions negative values end up converted to large
362 positive values, and otherwise they are not valid sizes,
363 the resulting range is in both cases [0, TYPE_MAX]. */
364 min
= wi::zero (expprec
);
365 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
367 else if (wi::les_p (min
- 1, 0))
369 /* EXP is not in a negative-positive range. That means EXP
370 is either negative, or greater than max. Since negative
371 sizes are invalid make the range [MAX + 1, TYPE_MAX]. */
373 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
378 min
= wi::zero (expprec
);
383 wide_int maxsize
= wi::to_wide (max_object_size ());
384 min
= wide_int::from (min
, maxsize
.get_precision (), UNSIGNED
);
385 max
= wide_int::from (max
, maxsize
.get_precision (), UNSIGNED
);
386 if (wi::eq_p (0, min
- 1))
388 /* EXP is unsigned and not in the range [1, MAX]. That means
389 it's either zero or greater than MAX. Even though 0 would
390 normally be detected by -Walloc-zero, unless ALLOW_ZERO
391 is set, set the range to [MAX, TYPE_MAX] so that when MAX
392 is greater than the limit the whole range is diagnosed. */
393 wide_int maxsize
= wi::to_wide (max_object_size ());
394 if (flags
& SR_ALLOW_ZERO
)
396 if (wi::leu_p (maxsize
, max
+ 1)
397 || !(flags
& SR_USE_LARGEST
))
398 min
= max
= wi::zero (expprec
);
402 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
408 max
= wi::to_wide (TYPE_MAX_VALUE (exptype
));
411 else if ((flags
& SR_USE_LARGEST
)
412 && wi::ltu_p (max
+ 1, maxsize
))
414 /* When USE_LARGEST is set and the larger of the two subranges
415 is a valid size, use it... */
421 /* ...otherwise use the smaller subrange. */
423 min
= wi::zero (expprec
);
428 range
[0] = wide_int_to_tree (exptype
, min
);
429 range
[1] = wide_int_to_tree (exptype
, max
);
435 get_size_range (tree exp
, tree range
[2], int flags
/* = 0 */)
437 return get_size_range (/*query=*/NULL
, exp
, /*stmt=*/NULL
, range
, flags
);
440 /* If STMT is a call to an allocation function, returns the constant
441 maximum size of the object allocated by the call represented as
442 sizetype. If nonnull, sets RNG1[] to the range of the size.
443 When nonnull, uses RVALS for range information, otherwise gets global
445 Returns null when STMT is not a call to a valid allocation function. */
448 gimple_call_alloc_size (gimple
*stmt
, wide_int rng1
[2] /* = NULL */,
449 range_query
*qry
/* = NULL */)
451 if (!stmt
|| !is_gimple_call (stmt
))
455 if (tree fndecl
= gimple_call_fndecl (stmt
))
456 allocfntype
= TREE_TYPE (fndecl
);
458 allocfntype
= gimple_call_fntype (stmt
);
463 unsigned argidx1
= UINT_MAX
, argidx2
= UINT_MAX
;
464 tree at
= lookup_attribute ("alloc_size", TYPE_ATTRIBUTES (allocfntype
));
467 if (!gimple_call_builtin_p (stmt
, BUILT_IN_ALLOCA_WITH_ALIGN
))
473 unsigned nargs
= gimple_call_num_args (stmt
);
475 if (argidx1
== UINT_MAX
)
477 tree atval
= TREE_VALUE (at
);
481 argidx1
= TREE_INT_CST_LOW (TREE_VALUE (atval
)) - 1;
482 if (nargs
<= argidx1
)
485 atval
= TREE_CHAIN (atval
);
488 argidx2
= TREE_INT_CST_LOW (TREE_VALUE (atval
)) - 1;
489 if (nargs
<= argidx2
)
494 tree size
= gimple_call_arg (stmt
, argidx1
);
496 wide_int rng1_buf
[2];
497 /* If RNG1 is not set, use the buffer. */
501 /* Use maximum precision to avoid overflow below. */
502 const int prec
= ADDR_MAX_PRECISION
;
506 /* Determine the largest valid range size, including zero. */
507 if (!get_size_range (qry
, size
, stmt
, r
, SR_ALLOW_ZERO
| SR_USE_LARGEST
))
509 rng1
[0] = wi::to_wide (r
[0], prec
);
510 rng1
[1] = wi::to_wide (r
[1], prec
);
513 if (argidx2
> nargs
&& TREE_CODE (size
) == INTEGER_CST
)
514 return fold_convert (sizetype
, size
);
516 /* To handle ranges do the math in wide_int and return the product
517 of the upper bounds as a constant. Ignore anti-ranges. */
518 tree n
= argidx2
< nargs
? gimple_call_arg (stmt
, argidx2
) : integer_one_node
;
522 /* As above, use the full non-negative range on failure. */
523 if (!get_size_range (qry
, n
, stmt
, r
, SR_ALLOW_ZERO
| SR_USE_LARGEST
))
525 rng2
[0] = wi::to_wide (r
[0], prec
);
526 rng2
[1] = wi::to_wide (r
[1], prec
);
529 /* Compute products of both bounds for the caller but return the lesser
530 of SIZE_MAX and the product of the upper bounds as a constant. */
531 rng1
[0] = rng1
[0] * rng2
[0];
532 rng1
[1] = rng1
[1] * rng2
[1];
534 const tree size_max
= TYPE_MAX_VALUE (sizetype
);
535 if (wi::gtu_p (rng1
[1], wi::to_wide (size_max
, prec
)))
537 rng1
[1] = wi::to_wide (size_max
, prec
);
541 return wide_int_to_tree (sizetype
, rng1
[1]);
544 /* For an access to an object referenced to by the function parameter PTR
545 of pointer type, and set RNG[] to the range of sizes of the object
546 obtainedfrom the attribute access specification for the current function.
547 Set STATIC_ARRAY if the array parameter has been declared [static].
548 Return the function parameter on success and null otherwise. */
551 gimple_parm_array_size (tree ptr
, wide_int rng
[2],
552 bool *static_array
/* = NULL */)
554 /* For a function argument try to determine the byte size of the array
555 from the current function declaratation (e.g., attribute access or
557 tree var
= SSA_NAME_VAR (ptr
);
558 if (TREE_CODE (var
) != PARM_DECL
|| !POINTER_TYPE_P (TREE_TYPE (var
)))
561 const unsigned prec
= TYPE_PRECISION (sizetype
);
564 attr_access
*access
= get_parm_access (rdwr_idx
, var
);
568 if (access
->sizarg
!= UINT_MAX
)
570 /* TODO: Try to extract the range from the argument based on
571 those of subsequent assertions or based on known calls to
572 the current function. */
576 if (!access
->minsize
)
579 /* Only consider ordinary array bound at level 2 (or above if it's
581 if (warn_array_parameter
< 2 && !access
->static_p
)
585 *static_array
= access
->static_p
;
587 rng
[0] = wi::zero (prec
);
588 rng
[1] = wi::uhwi (access
->minsize
, prec
);
589 /* Multiply the array bound encoded in the attribute by the size
590 of what the pointer argument to which it decays points to. */
591 tree eltype
= TREE_TYPE (TREE_TYPE (ptr
));
592 tree size
= TYPE_SIZE_UNIT (eltype
);
593 if (!size
|| TREE_CODE (size
) != INTEGER_CST
)
596 rng
[1] *= wi::to_wide (size
, prec
);
600 /* Initialize the object. */
602 access_ref::access_ref ()
603 : ref (), eval ([](tree x
){ return x
; }), deref (), trail1special (true),
604 base0 (true), parmarray ()
607 offrng
[0] = offrng
[1] = 0;
608 offmax
[0] = offmax
[1] = 0;
610 sizrng
[0] = sizrng
[1] = -1;
613 /* Return the PHI node REF refers to or null if it doesn't. */
616 access_ref::phi () const
618 if (!ref
|| TREE_CODE (ref
) != SSA_NAME
)
621 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ref
);
622 if (!def_stmt
|| gimple_code (def_stmt
) != GIMPLE_PHI
)
625 return as_a
<gphi
*> (def_stmt
);
628 /* Determine the size and offset for ARG, append it to ALL_REFS, and
629 merge the result with *THIS. Ignore ARG if SKIP_NULL is set and
630 ARG refers to the null pointer. Return true on success and false
634 access_ref::merge_ref (vec
<access_ref
> *all_refs
, tree arg
, gimple
*stmt
,
635 int ostype
, bool skip_null
,
636 ssa_name_limit_t
&snlim
, pointer_query
&qry
)
639 if (!compute_objsize_r (arg
, stmt
, false, ostype
, &aref
, snlim
, &qry
)
640 || aref
.sizrng
[0] < 0)
642 /* This may be a PHI with all null pointer arguments. Handle it
643 conservatively by setting all properties to the most permissive
646 offrng
[0] = offrng
[1] = 0;
648 set_max_size_range ();
654 access_ref dummy_ref
;
655 aref
.get_ref (all_refs
, &dummy_ref
, ostype
, &snlim
, &qry
);
658 if (TREE_CODE (arg
) == SSA_NAME
)
659 qry
.put_ref (arg
, aref
, ostype
);
662 all_refs
->safe_push (aref
);
666 bool merged_parmarray
= aref
.parmarray
;
668 const bool nullp
= skip_null
&& integer_zerop (arg
);
669 const offset_int maxobjsize
= wi::to_offset (max_object_size ());
670 offset_int minsize
= sizrng
[0];
674 /* If *THIS doesn't contain a meaningful result yet set it to AREF
675 unless the argument is null and it's okay to ignore it. */
679 /* Set if the current argument refers to one or more objects of
680 known size (or range of sizes), as opposed to referring to
681 one or more unknown object(s). */
682 const bool arg_known_size
= (aref
.sizrng
[0] != 0
683 || aref
.sizrng
[1] != maxobjsize
);
685 sizrng
[0] = aref
.sizrng
[0];
690 /* Disregard null pointers in PHIs with two or more arguments.
691 TODO: Handle this better! */
695 const bool known_size
= (sizrng
[0] != 0 || sizrng
[1] != maxobjsize
);
697 if (known_size
&& aref
.sizrng
[0] < minsize
)
698 minsize
= aref
.sizrng
[0];
700 /* Extend the size and offset of *THIS to account for AREF. The result
701 can be cached but results in false negatives. */
704 if (sizrng
[1] < aref
.sizrng
[1])
712 orng
[0] = aref
.offrng
[0];
713 orng
[1] = aref
.offrng
[1];
716 if (orng
[0] < offrng
[0])
718 if (offrng
[1] < orng
[1])
721 /* Reset the PHI's BASE0 flag if any of the nonnull arguments
722 refers to an object at an unknown offset. */
727 parmarray
= merged_parmarray
;
732 /* Determine and return the largest object to which *THIS refers. If
733 *THIS refers to a PHI and PREF is nonnull, fill *PREF with the details
734 of the object determined by compute_objsize(ARG, OSTYPE) for each PHI
738 access_ref::get_ref (vec
<access_ref
> *all_refs
,
739 access_ref
*pref
/* = NULL */,
740 int ostype
/* = 1 */,
741 ssa_name_limit_t
*psnlim
/* = NULL */,
742 pointer_query
*qry
/* = NULL */) const
744 if (!ref
|| TREE_CODE (ref
) != SSA_NAME
)
747 /* FIXME: Calling get_ref() with a null PSNLIM is dangerous and might
748 cause unbounded recursion. */
749 ssa_name_limit_t snlim_buf
;
753 pointer_query empty_qry
;
757 if (gimple
*def_stmt
= SSA_NAME_DEF_STMT (ref
))
759 if (is_gimple_assign (def_stmt
))
761 tree_code code
= gimple_assign_rhs_code (def_stmt
);
762 if (code
!= MIN_EXPR
&& code
!= MAX_EXPR
)
766 tree arg1
= gimple_assign_rhs1 (def_stmt
);
767 aref
.merge_ref (all_refs
, arg1
, def_stmt
, ostype
, false,
770 tree arg2
= gimple_assign_rhs2 (def_stmt
);
771 aref
.merge_ref (all_refs
, arg2
, def_stmt
, ostype
, false,
774 if (pref
&& pref
!= this)
776 tree ref
= pref
->ref
;
787 gphi
*phi_stmt
= this->phi ();
791 if (!psnlim
->visit_phi (ref
))
794 /* The conservative result of the PHI reflecting the offset and size
795 of the largest PHI argument, regardless of whether or not they all
796 refer to the same object. */
800 /* The identity of the object has not been determined yet but
801 PREF->REF is set by the caller to the PHI for convenience.
802 The size is negative/invalid and the offset is zero (it's
803 updated only after the identity of the object has been
805 gcc_assert (pref
->sizrng
[0] < 0);
806 gcc_assert (pref
->offrng
[0] == 0 && pref
->offrng
[1] == 0);
811 const offset_int maxobjsize
= wi::to_offset (max_object_size ());
812 const unsigned nargs
= gimple_phi_num_args (phi_stmt
);
813 for (unsigned i
= 0; i
< nargs
; ++i
)
815 access_ref phi_arg_ref
;
816 bool skip_null
= i
|| i
+ 1 < nargs
;
817 tree arg
= gimple_phi_arg_def (phi_stmt
, i
);
818 phi_ref
.merge_ref (all_refs
, arg
, phi_stmt
, ostype
, skip_null
,
822 && phi_ref
.sizrng
[0] == 0
823 && phi_ref
.sizrng
[1] >= maxobjsize
)
824 /* When an argument results in the most permissive result,
825 the remaining arguments cannot constrain it. Short-circuit
830 if (phi_ref
.sizrng
[0] < 0)
832 /* Fail if none of the PHI's arguments resulted in updating PHI_REF
833 (perhaps because they have all been already visited by prior
835 psnlim
->leave_phi (ref
);
839 /* Avoid changing *THIS. */
840 if (pref
&& pref
!= this)
842 /* Keep the SSA_NAME of the PHI unchanged so that all PHI arguments
843 can be referred to later if necessary. This is useful even if
844 they all refer to the same object. */
845 tree ref
= pref
->ref
;
850 psnlim
->leave_phi (ref
);
855 /* Return the maximum amount of space remaining and if non-null, set
856 argument to the minimum. */
859 access_ref::size_remaining (offset_int
*pmin
/* = NULL */) const
867 /* If the identity of the object hasn't been determined return
868 the maximum size range. */
870 return wi::to_offset (max_object_size ());
873 /* add_offset() ensures the offset range isn't inverted. */
874 gcc_checking_assert (offrng
[0] <= offrng
[1]);
878 /* The offset into referenced object is zero-based (i.e., it's
879 not referenced by a pointer into middle of some unknown object). */
880 if (offrng
[0] < 0 && offrng
[1] < 0)
882 /* If the offset is negative the remaining size is zero. */
887 if (sizrng
[1] <= offrng
[0])
889 /* If the starting offset is greater than or equal to the upper
890 bound on the size of the object, the space remaining is zero.
891 As a special case, if it's equal, set *PMIN to -1 to let
892 the caller know the offset is valid and just past the end. */
893 *pmin
= sizrng
[1] == offrng
[0] ? -1 : 0;
897 /* Otherwise return the size minus the lower bound of the offset. */
898 offset_int or0
= offrng
[0] < 0 ? 0 : offrng
[0];
900 *pmin
= sizrng
[0] - or0
;
901 return sizrng
[1] - or0
;
904 /* The offset to the referenced object isn't zero-based (i.e., it may
905 refer to a byte other than the first. The size of such an object
906 is constrained only by the size of the address space (the result
907 of max_object_size()). */
908 if (sizrng
[1] <= offrng
[0])
914 offset_int or0
= offrng
[0] < 0 ? 0 : offrng
[0];
916 *pmin
= sizrng
[0] - or0
;
917 return sizrng
[1] - or0
;
920 /* Return true if the offset and object size are in range for SIZE. */
923 access_ref::offset_in_range (const offset_int
&size
) const
925 if (size_remaining () < size
)
929 return offmax
[0] >= 0 && offmax
[1] <= sizrng
[1];
931 offset_int maxoff
= wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
932 return offmax
[0] > -maxoff
&& offmax
[1] < maxoff
;
935 /* Add the range [MIN, MAX] to the offset range. For known objects (with
936 zero-based offsets) at least one of whose offset's bounds is in range,
937 constrain the other (or both) to the bounds of the object (i.e., zero
938 and the upper bound of its size). This improves the quality of
941 void access_ref::add_offset (const offset_int
&min
, const offset_int
&max
)
945 /* To add an ordinary range just add it to the bounds. */
951 /* To add an inverted range to an offset to an unknown object
952 expand it to the maximum. */
958 /* To add an inverted range to an offset to an known object set
959 the upper bound to the maximum representable offset value
960 (which may be greater than MAX_OBJECT_SIZE).
961 The lower bound is either the sum of the current offset and
962 MIN when abs(MAX) is greater than the former, or zero otherwise.
963 Zero because then the inverted range includes the negative of
965 offset_int maxoff
= wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
976 offset_int absmax
= wi::abs (max
);
977 if (offrng
[0] < absmax
)
980 /* Cap the lower bound at the upper (set to MAXOFF above)
981 to avoid inadvertently recreating an inverted range. */
982 if (offrng
[1] < offrng
[0])
983 offrng
[0] = offrng
[1];
989 /* Set the minimum and maximmum computed so far. */
990 if (offrng
[1] < 0 && offrng
[1] < offmax
[0])
991 offmax
[0] = offrng
[1];
992 if (offrng
[0] > 0 && offrng
[0] > offmax
[1])
993 offmax
[1] = offrng
[0];
998 /* When referencing a known object check to see if the offset computed
999 so far is in bounds... */
1000 offset_int remrng
[2];
1001 remrng
[1] = size_remaining (remrng
);
1002 if (remrng
[1] > 0 || remrng
[0] < 0)
1004 /* ...if so, constrain it so that neither bound exceeds the size of
1005 the object. Out of bounds offsets are left unchanged, and, for
1006 better or worse, become in bounds later. They should be detected
1007 and diagnosed at the point they first become invalid by
1011 if (offrng
[1] > sizrng
[1])
1012 offrng
[1] = sizrng
[1];
1016 /* Issue one inform message describing each target of an access REF.
1017 WRITE is set for a write access and clear for a read access. */
1020 access_ref::inform_access (access_mode mode
, int ostype
/* = 1 */) const
1022 const access_ref
&aref
= *this;
1028 /* Set MAXREF to refer to the largest object and fill ALL_REFS
1029 with data for all objects referenced by the PHI arguments. */
1031 auto_vec
<access_ref
> all_refs
;
1032 if (!get_ref (&all_refs
, &maxref
, ostype
))
1035 if (all_refs
.length ())
1037 /* Except for MAXREF, the rest of the arguments' offsets need not
1038 reflect one added to the PHI itself. Determine the latter from
1039 MAXREF on which the result is based. */
1040 const offset_int orng
[] =
1042 offrng
[0] - maxref
.offrng
[0],
1043 wi::smax (offrng
[1] - maxref
.offrng
[1], offrng
[0]),
1046 /* Add the final PHI's offset to that of each of the arguments
1047 and recurse to issue an inform message for it. */
1048 for (unsigned i
= 0; i
!= all_refs
.length (); ++i
)
1050 /* Skip any PHIs; those could lead to infinite recursion. */
1051 if (all_refs
[i
].phi ())
1054 all_refs
[i
].add_offset (orng
[0], orng
[1]);
1055 all_refs
[i
].inform_access (mode
, ostype
);
1061 /* Convert offset range and avoid including a zero range since it
1062 isn't necessarily meaningful. */
1063 HOST_WIDE_INT diff_min
= tree_to_shwi (TYPE_MIN_VALUE (ptrdiff_type_node
));
1064 HOST_WIDE_INT diff_max
= tree_to_shwi (TYPE_MAX_VALUE (ptrdiff_type_node
));
1065 HOST_WIDE_INT minoff
;
1066 HOST_WIDE_INT maxoff
= diff_max
;
1067 if (wi::fits_shwi_p (aref
.offrng
[0]))
1068 minoff
= aref
.offrng
[0].to_shwi ();
1070 minoff
= aref
.offrng
[0] < 0 ? diff_min
: diff_max
;
1072 if (wi::fits_shwi_p (aref
.offrng
[1]))
1073 maxoff
= aref
.offrng
[1].to_shwi ();
1075 if (maxoff
<= diff_min
|| maxoff
>= diff_max
)
1076 /* Avoid mentioning an upper bound that's equal to or in excess
1077 of the maximum of ptrdiff_t. */
1080 /* Convert size range and always include it since all sizes are
1082 unsigned long long minsize
= 0, maxsize
= 0;
1083 if (wi::fits_shwi_p (aref
.sizrng
[0])
1084 && wi::fits_shwi_p (aref
.sizrng
[1]))
1086 minsize
= aref
.sizrng
[0].to_shwi ();
1087 maxsize
= aref
.sizrng
[1].to_shwi ();
1090 /* SIZRNG doesn't necessarily have the same range as the allocation
1091 size determined by gimple_call_alloc_size (). */
1093 if (minsize
== maxsize
)
1094 sprintf (sizestr
, "%llu", minsize
);
1096 sprintf (sizestr
, "[%llu, %llu]", minsize
, maxsize
);
1100 && (maxoff
== 0 || aref
.sizrng
[1] <= maxoff
))
1102 else if (minoff
== maxoff
)
1103 sprintf (offstr
, "%lli", (long long) minoff
);
1105 sprintf (offstr
, "[%lli, %lli]", (long long) minoff
, (long long) maxoff
);
1107 location_t loc
= UNKNOWN_LOCATION
;
1109 tree ref
= this->ref
;
1110 tree allocfn
= NULL_TREE
;
1111 if (TREE_CODE (ref
) == SSA_NAME
)
1113 gimple
*stmt
= SSA_NAME_DEF_STMT (ref
);
1117 if (is_gimple_call (stmt
))
1119 loc
= gimple_location (stmt
);
1120 if (gimple_call_builtin_p (stmt
, BUILT_IN_ALLOCA_WITH_ALIGN
))
1122 /* Strip the SSA_NAME suffix from the variable name and
1123 recreate an identifier with the VLA's original name. */
1124 ref
= gimple_call_lhs (stmt
);
1125 if (SSA_NAME_IDENTIFIER (ref
))
1127 ref
= SSA_NAME_IDENTIFIER (ref
);
1128 const char *id
= IDENTIFIER_POINTER (ref
);
1129 size_t len
= strcspn (id
, ".$");
1132 ref
= get_identifier_with_length (id
, len
);
1137 /* Except for VLAs, retrieve the allocation function. */
1138 allocfn
= gimple_call_fndecl (stmt
);
1140 allocfn
= gimple_call_fn (stmt
);
1141 if (TREE_CODE (allocfn
) == SSA_NAME
)
1143 /* For an ALLOC_CALL via a function pointer make a small
1144 effort to determine the destination of the pointer. */
1145 gimple
*def
= SSA_NAME_DEF_STMT (allocfn
);
1146 if (gimple_assign_single_p (def
))
1148 tree rhs
= gimple_assign_rhs1 (def
);
1151 else if (TREE_CODE (rhs
) == COMPONENT_REF
)
1152 allocfn
= TREE_OPERAND (rhs
, 1);
1157 else if (gimple_nop_p (stmt
))
1158 /* Handle DECL_PARM below. */
1159 ref
= SSA_NAME_VAR (ref
);
1160 else if (is_gimple_assign (stmt
)
1161 && (gimple_assign_rhs_code (stmt
) == MIN_EXPR
1162 || gimple_assign_rhs_code (stmt
) == MAX_EXPR
))
1164 /* MIN or MAX_EXPR here implies a reference to a known object
1165 and either an unknown or distinct one (the latter being
1166 the result of an invalid relational expression). Determine
1167 the identity of the former and point to it in the note.
1168 TODO: Consider merging with PHI handling. */
1169 access_ref arg_ref
[2];
1170 tree arg
= gimple_assign_rhs1 (stmt
);
1171 compute_objsize (arg
, /* ostype = */ 1 , &arg_ref
[0]);
1172 arg
= gimple_assign_rhs2 (stmt
);
1173 compute_objsize (arg
, /* ostype = */ 1 , &arg_ref
[1]);
1175 /* Use the argument that references a known object with more
1178 = (!arg_ref
[0].ref
|| !arg_ref
[0].base0
1179 || (arg_ref
[0].base0
&& arg_ref
[1].base0
1180 && (arg_ref
[0].size_remaining ()
1181 < arg_ref
[1].size_remaining ())));
1183 arg_ref
[idx
].offrng
[0] = offrng
[0];
1184 arg_ref
[idx
].offrng
[1] = offrng
[1];
1185 arg_ref
[idx
].inform_access (mode
);
1191 loc
= DECL_SOURCE_LOCATION (ref
);
1192 else if (EXPR_P (ref
) && EXPR_HAS_LOCATION (ref
))
1193 loc
= EXPR_LOCATION (ref
);
1194 else if (TREE_CODE (ref
) != IDENTIFIER_NODE
1195 && TREE_CODE (ref
) != SSA_NAME
)
1198 if (mode
== access_read_write
|| mode
== access_write_only
)
1200 if (allocfn
== NULL_TREE
)
1203 inform (loc
, "at offset %s into destination object %qE of size %s",
1204 offstr
, ref
, sizestr
);
1206 inform (loc
, "destination object %qE of size %s", ref
, sizestr
);
1212 "at offset %s into destination object of size %s "
1213 "allocated by %qE", offstr
, sizestr
, allocfn
);
1215 inform (loc
, "destination object of size %s allocated by %qE",
1220 if (mode
== access_read_only
)
1222 if (allocfn
== NULL_TREE
)
1225 inform (loc
, "at offset %s into source object %qE of size %s",
1226 offstr
, ref
, sizestr
);
1228 inform (loc
, "source object %qE of size %s", ref
, sizestr
);
1235 "at offset %s into source object of size %s allocated by %qE",
1236 offstr
, sizestr
, allocfn
);
1238 inform (loc
, "source object of size %s allocated by %qE",
1243 if (allocfn
== NULL_TREE
)
1246 inform (loc
, "at offset %s into object %qE of size %s",
1247 offstr
, ref
, sizestr
);
1249 inform (loc
, "object %qE of size %s", ref
, sizestr
);
1256 "at offset %s into object of size %s allocated by %qE",
1257 offstr
, sizestr
, allocfn
);
1259 inform (loc
, "object of size %s allocated by %qE",
1263 /* Dump *THIS to FILE. */
1266 access_ref::dump (FILE *file
) const
1268 for (int i
= deref
; i
< 0; ++i
)
1271 for (int i
= 0; i
< deref
; ++i
)
1274 if (gphi
*phi_stmt
= phi ())
1276 fputs ("PHI <", file
);
1277 unsigned nargs
= gimple_phi_num_args (phi_stmt
);
1278 for (unsigned i
= 0; i
!= nargs
; ++i
)
1280 tree arg
= gimple_phi_arg_def (phi_stmt
, i
);
1281 print_generic_expr (file
, arg
);
1288 print_generic_expr (file
, ref
);
1290 if (offrng
[0] != offrng
[1])
1291 fprintf (file
, " + [%lli, %lli]",
1292 (long long) offrng
[0].to_shwi (),
1293 (long long) offrng
[1].to_shwi ());
1294 else if (offrng
[0] != 0)
1295 fprintf (file
, " %c %lli",
1296 offrng
[0] < 0 ? '-' : '+',
1297 (long long) offrng
[0].to_shwi ());
1300 fputs (" (base0)", file
);
1302 fputs ("; size: ", file
);
1303 if (sizrng
[0] != sizrng
[1])
1305 offset_int maxsize
= wi::to_offset (max_object_size ());
1306 if (sizrng
[0] == 0 && sizrng
[1] >= maxsize
)
1307 fputs ("unknown", file
);
1309 fprintf (file
, "[%llu, %llu]",
1310 (unsigned long long) sizrng
[0].to_uhwi (),
1311 (unsigned long long) sizrng
[1].to_uhwi ());
1313 else if (sizrng
[0] != 0)
1314 fprintf (file
, "%llu",
1315 (unsigned long long) sizrng
[0].to_uhwi ());
1320 /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
1321 when MINWRITE or MINREAD, respectively, is set. */
1322 access_data::access_data (range_query
*query
, gimple
*stmt
, access_mode mode
,
1323 tree maxwrite
/* = NULL_TREE */,
1324 bool minwrite
/* = false */,
1325 tree maxread
/* = NULL_TREE */,
1326 bool minread
/* = false */)
1327 : stmt (stmt
), call (), dst (), src (), mode (mode
), ostype ()
1329 set_bound (dst_bndrng
, maxwrite
, minwrite
, query
, stmt
);
1330 set_bound (src_bndrng
, maxread
, minread
, query
, stmt
);
1333 /* Set the access to at most MAXWRITE and MAXREAD bytes, and at least 1
1334 when MINWRITE or MINREAD, respectively, is set. */
1335 access_data::access_data (range_query
*query
, tree expr
, access_mode mode
,
1336 tree maxwrite
/* = NULL_TREE */,
1337 bool minwrite
/* = false */,
1338 tree maxread
/* = NULL_TREE */,
1339 bool minread
/* = false */)
1340 : stmt (), call (expr
), dst (), src (), mode (mode
), ostype ()
1342 set_bound (dst_bndrng
, maxwrite
, minwrite
, query
, stmt
);
1343 set_bound (src_bndrng
, maxread
, minread
, query
, stmt
);
1346 /* Set BNDRNG to the range of BOUND for the statement STMT. */
1349 access_data::set_bound (offset_int bndrng
[2], tree bound
, bool minaccess
,
1350 range_query
*query
, gimple
*stmt
)
1352 /* Set the default bounds of the access and adjust below. */
1353 bndrng
[0] = minaccess
? 1 : 0;
1354 bndrng
[1] = HOST_WIDE_INT_M1U
;
1356 /* When BOUND is nonnull and a range can be extracted from it,
1357 set the bounds of the access to reflect both it and MINACCESS.
1358 BNDRNG[0] is the size of the minimum access. */
1360 if (bound
&& get_size_range (query
, bound
, stmt
, rng
, SR_ALLOW_ZERO
))
1362 bndrng
[0] = wi::to_offset (rng
[0]);
1363 bndrng
[1] = wi::to_offset (rng
[1]);
1364 bndrng
[0] = bndrng
[0] > 0 && minaccess
? 1 : 0;
1368 /* Set a bit for the PHI in VISITED and return true if it wasn't
1372 ssa_name_limit_t::visit_phi (tree ssa_name
)
1375 visited
= BITMAP_ALLOC (NULL
);
1377 /* Return false if SSA_NAME has already been visited. */
1378 return bitmap_set_bit (visited
, SSA_NAME_VERSION (ssa_name
));
1381 /* Clear a bit for the PHI in VISITED. */
1384 ssa_name_limit_t::leave_phi (tree ssa_name
)
1386 /* Return false if SSA_NAME has already been visited. */
1387 bitmap_clear_bit (visited
, SSA_NAME_VERSION (ssa_name
));
1390 /* Return false if the SSA_NAME chain length counter has reached
1391 the limit, otherwise increment the counter and return true. */
1394 ssa_name_limit_t::next ()
1396 /* Return a negative value to let caller avoid recursing beyond
1397 the specified limit. */
1398 if (ssa_def_max
== 0)
1405 /* If the SSA_NAME has already been "seen" return a positive value.
1406 Otherwise add it to VISITED. If the SSA_NAME limit has been
1407 reached, return a negative value. Otherwise return zero. */
1410 ssa_name_limit_t::next_phi (tree ssa_name
)
1413 gimple
*def_stmt
= SSA_NAME_DEF_STMT (ssa_name
);
1414 /* Return a positive value if the PHI has already been visited. */
1415 if (gimple_code (def_stmt
) == GIMPLE_PHI
1416 && !visit_phi (ssa_name
))
1420 /* Return a negative value to let caller avoid recursing beyond
1421 the specified limit. */
1422 if (ssa_def_max
== 0)
1430 ssa_name_limit_t::~ssa_name_limit_t ()
1433 BITMAP_FREE (visited
);
1436 /* Default ctor. Initialize object with pointers to the range_query
1437 instance to use or null. */
1439 pointer_query::pointer_query (range_query
*qry
/* = NULL */)
1440 : rvals (qry
), hits (), misses (), failures (), depth (), max_depth (),
1446 /* Return a pointer to the cached access_ref instance for the SSA_NAME
1447 PTR if it's there or null otherwise. */
1450 pointer_query::get_ref (tree ptr
, int ostype
/* = 1 */) const
1452 unsigned version
= SSA_NAME_VERSION (ptr
);
1453 unsigned idx
= version
<< 1 | (ostype
& 1);
1454 if (var_cache
.indices
.length () <= idx
)
1460 unsigned cache_idx
= var_cache
.indices
[idx
];
1461 if (var_cache
.access_refs
.length () <= cache_idx
)
1467 const access_ref
&cache_ref
= var_cache
.access_refs
[cache_idx
];
1478 /* Retrieve the access_ref instance for a variable from the cache if it's
1479 there or compute it and insert it into the cache if it's nonnonull. */
1482 pointer_query::get_ref (tree ptr
, gimple
*stmt
, access_ref
*pref
,
1483 int ostype
/* = 1 */)
1485 const unsigned version
1486 = TREE_CODE (ptr
) == SSA_NAME
? SSA_NAME_VERSION (ptr
) : 0;
1490 unsigned idx
= version
<< 1 | (ostype
& 1);
1491 if (idx
< var_cache
.indices
.length ())
1493 unsigned cache_idx
= var_cache
.indices
[idx
] - 1;
1494 if (cache_idx
< var_cache
.access_refs
.length ()
1495 && var_cache
.access_refs
[cache_idx
].ref
)
1498 *pref
= var_cache
.access_refs
[cache_idx
];
1506 if (!compute_objsize (ptr
, stmt
, ostype
, pref
, this))
1515 /* Add a copy of the access_ref REF for the SSA_NAME to the cache if it's
1519 pointer_query::put_ref (tree ptr
, const access_ref
&ref
, int ostype
/* = 1 */)
1521 /* Only add populated/valid entries. */
1522 if (!ref
.ref
|| ref
.sizrng
[0] < 0)
1525 /* Add REF to the two-level cache. */
1526 unsigned version
= SSA_NAME_VERSION (ptr
);
1527 unsigned idx
= version
<< 1 | (ostype
& 1);
1529 /* Grow INDICES if necessary. An index is valid if it's nonzero.
1530 Its value minus one is the index into ACCESS_REFS. Not all
1531 entries are valid. */
1532 if (var_cache
.indices
.length () <= idx
)
1533 var_cache
.indices
.safe_grow_cleared (idx
+ 1);
1535 if (!var_cache
.indices
[idx
])
1536 var_cache
.indices
[idx
] = var_cache
.access_refs
.length () + 1;
1538 /* Grow ACCESS_REF cache if necessary. An entry is valid if its
1539 REF member is nonnull. All entries except for the last two
1540 are valid. Once nonnull, the REF value must stay unchanged. */
1541 unsigned cache_idx
= var_cache
.indices
[idx
];
1542 if (var_cache
.access_refs
.length () <= cache_idx
)
1543 var_cache
.access_refs
.safe_grow_cleared (cache_idx
+ 1);
1545 access_ref
&cache_ref
= var_cache
.access_refs
[cache_idx
];
1548 gcc_checking_assert (cache_ref
.ref
== ref
.ref
);
1555 /* Flush the cache if it's nonnull. */
1558 pointer_query::flush_cache ()
1560 var_cache
.indices
.release ();
1561 var_cache
.access_refs
.release ();
1564 /* Dump statistics and, optionally, cache contents to DUMP_FILE. */
1567 pointer_query::dump (FILE *dump_file
, bool contents
/* = false */)
1569 unsigned nused
= 0, nrefs
= 0;
1570 unsigned nidxs
= var_cache
.indices
.length ();
1571 for (unsigned i
= 0; i
!= nidxs
; ++i
)
1573 unsigned ari
= var_cache
.indices
[i
];
1579 const access_ref
&aref
= var_cache
.access_refs
[ari
];
1586 fprintf (dump_file
, "pointer_query counters:\n"
1587 " index cache size: %u\n"
1588 " index entries: %u\n"
1589 " access cache size: %u\n"
1590 " access entries: %u\n"
1596 var_cache
.access_refs
.length (), nrefs
,
1597 hits
, misses
, failures
, max_depth
);
1599 if (!contents
|| !nidxs
)
1602 fputs ("\npointer_query cache contents:\n", dump_file
);
1604 for (unsigned i
= 0; i
!= nidxs
; ++i
)
1606 unsigned ari
= var_cache
.indices
[i
];
1610 const access_ref
&aref
= var_cache
.access_refs
[ari
];
1614 /* The level-1 cache index corresponds to the SSA_NAME_VERSION
1615 shifted left by one and ORed with the Object Size Type in
1616 the lowest bit. Print the two separately. */
1617 unsigned ver
= i
>> 1;
1618 unsigned ost
= i
& 1;
1620 fprintf (dump_file
, " %u.%u[%u]: ", ver
, ost
, ari
);
1621 if (tree name
= ssa_name (ver
))
1623 print_generic_expr (dump_file
, name
);
1624 fputs (" = ", dump_file
);
1627 fprintf (dump_file
, " _%u = ", ver
);
1629 aref
.dump (dump_file
);
1632 fputc ('\n', dump_file
);
1635 /* A helper of compute_objsize_r() to determine the size from an assignment
1636 statement STMT with the RHS of either MIN_EXPR or MAX_EXPR. On success
1637 set PREF->REF to the operand with more or less space remaining,
1638 respectively, if both refer to the same (sub)object, or to PTR if they
1639 might not, and return true. Otherwise, if the identity of neither
1640 operand can be determined, return false. */
1643 handle_min_max_size (tree ptr
, int ostype
, access_ref
*pref
,
1644 ssa_name_limit_t
&snlim
, pointer_query
*qry
)
1646 gimple
*stmt
= SSA_NAME_DEF_STMT (ptr
);
1647 const tree_code code
= gimple_assign_rhs_code (stmt
);
1649 /* In a valid MAX_/MIN_EXPR both operands must refer to the same array.
1650 Determine the size/offset of each and use the one with more or less
1651 space remaining, respectively. If either fails, use the information
1652 determined from the other instead, adjusted up or down as appropriate
1653 for the expression. */
1654 access_ref aref
[2] = { *pref
, *pref
};
1655 tree arg1
= gimple_assign_rhs1 (stmt
);
1656 if (!compute_objsize_r (arg1
, stmt
, false, ostype
, &aref
[0], snlim
, qry
))
1658 aref
[0].base0
= false;
1659 aref
[0].offrng
[0] = aref
[0].offrng
[1] = 0;
1660 aref
[0].add_max_offset ();
1661 aref
[0].set_max_size_range ();
1664 tree arg2
= gimple_assign_rhs2 (stmt
);
1665 if (!compute_objsize_r (arg2
, stmt
, false, ostype
, &aref
[1], snlim
, qry
))
1667 aref
[1].base0
= false;
1668 aref
[1].offrng
[0] = aref
[1].offrng
[1] = 0;
1669 aref
[1].add_max_offset ();
1670 aref
[1].set_max_size_range ();
1673 if (!aref
[0].ref
&& !aref
[1].ref
)
1674 /* Fail if the identity of neither argument could be determined. */
1678 if (aref
[0].ref
&& aref
[0].base0
)
1680 if (aref
[1].ref
&& aref
[1].base0
)
1682 /* If the object referenced by both arguments has been determined
1683 set *PREF to the one with more or less space remainng, whichever
1684 is appopriate for CODE.
1685 TODO: Indicate when the objects are distinct so it can be
1687 i0
= code
== MAX_EXPR
;
1688 const bool i1
= !i0
;
1690 if (aref
[i0
].size_remaining () < aref
[i1
].size_remaining ())
1695 if (aref
[i0
].ref
!= aref
[i1
].ref
)
1696 /* If the operands don't refer to the same (sub)object set
1697 PREF->REF to the SSA_NAME from which STMT was obtained
1698 so that both can be identified in a diagnostic. */
1704 /* If only the object referenced by one of the arguments could be
1705 determined, use it and... */
1712 const bool i1
= !i0
;
1713 /* ...see if the offset obtained from the other pointer can be used
1714 to tighten up the bound on the offset obtained from the first. */
1715 if ((code
== MAX_EXPR
&& aref
[i1
].offrng
[1] < aref
[i0
].offrng
[0])
1716 || (code
== MIN_EXPR
&& aref
[i0
].offrng
[0] < aref
[i1
].offrng
[1]))
1718 pref
->offrng
[0] = aref
[i0
].offrng
[0];
1719 pref
->offrng
[1] = aref
[i0
].offrng
[1];
1722 /* Replace PTR->REF with the SSA_NAME to indicate the expression
1723 might not refer to the same (sub)object. */
1728 /* A helper of compute_objsize_r() to determine the size of a DECL.
1729 Return true on success and (possibly in the future) false on failure. */
1732 handle_decl (tree decl
, bool addr
, access_ref
*pref
)
1734 tree decl_type
= TREE_TYPE (decl
);
1738 /* Reset the offset in case it was set by a prior call and not
1739 cleared by the caller. The offset is only adjusted after
1740 the identity of the object has been determined. */
1741 pref
->offrng
[0] = pref
->offrng
[1] = 0;
1743 if (!addr
&& POINTER_TYPE_P (decl_type
))
1745 /* Set the maximum size if the reference is to the pointer
1746 itself (as opposed to what it points to), and clear
1747 BASE0 since the offset isn't necessarily zero-based. */
1748 pref
->set_max_size_range ();
1749 pref
->base0
= false;
1753 /* Valid offsets into the object are nonnegative. */
1756 if (tree size
= decl_init_size (decl
, false))
1757 if (TREE_CODE (size
) == INTEGER_CST
)
1759 pref
->sizrng
[0] = wi::to_offset (size
);
1760 pref
->sizrng
[1] = pref
->sizrng
[0];
1764 pref
->set_max_size_range ();
1768 /* A helper of compute_objsize_r() to determine the size from ARRAY_REF
1769 AREF. ADDR is true if PTR is the operand of ADDR_EXPR. Return true
1770 on success and false on failure. */
1773 handle_array_ref (tree aref
, gimple
*stmt
, bool addr
, int ostype
,
1774 access_ref
*pref
, ssa_name_limit_t
&snlim
,
1777 gcc_assert (TREE_CODE (aref
) == ARRAY_REF
);
1779 tree arefop
= TREE_OPERAND (aref
, 0);
1780 tree reftype
= TREE_TYPE (arefop
);
1781 if (!addr
&& TREE_CODE (TREE_TYPE (reftype
)) == POINTER_TYPE
)
1782 /* Avoid arrays of pointers. FIXME: Hande pointers to arrays
1786 if (!compute_objsize_r (arefop
, stmt
, addr
, ostype
, pref
, snlim
, qry
))
1790 tree off
= pref
->eval (TREE_OPERAND (aref
, 1));
1791 range_query
*const rvals
= qry
? qry
->rvals
: NULL
;
1792 if (!get_offset_range (off
, stmt
, orng
, rvals
))
1794 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1795 orng
[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
1796 orng
[0] = -orng
[1] - 1;
1799 /* Convert the array index range determined above to a byte
1801 tree lowbnd
= array_ref_low_bound (aref
);
1802 if (!integer_zerop (lowbnd
) && tree_fits_uhwi_p (lowbnd
))
1804 /* Adjust the index by the low bound of the array domain
1805 (normally zero but 1 in Fortran). */
1806 unsigned HOST_WIDE_INT lb
= tree_to_uhwi (lowbnd
);
1811 tree eltype
= TREE_TYPE (aref
);
1812 tree tpsize
= TYPE_SIZE_UNIT (eltype
);
1813 if (!tpsize
|| TREE_CODE (tpsize
) != INTEGER_CST
)
1815 pref
->add_max_offset ();
1819 offset_int sz
= wi::to_offset (tpsize
);
1823 if (ostype
&& TREE_CODE (eltype
) == ARRAY_TYPE
)
1825 /* Except for the permissive raw memory functions which use
1826 the size of the whole object determined above, use the size
1827 of the referenced array. Because the overall offset is from
1828 the beginning of the complete array object add this overall
1829 offset to the size of array. */
1830 offset_int sizrng
[2] =
1832 pref
->offrng
[0] + orng
[0] + sz
,
1833 pref
->offrng
[1] + orng
[1] + sz
1835 if (sizrng
[1] < sizrng
[0])
1836 std::swap (sizrng
[0], sizrng
[1]);
1837 if (sizrng
[0] >= 0 && sizrng
[0] <= pref
->sizrng
[0])
1838 pref
->sizrng
[0] = sizrng
[0];
1839 if (sizrng
[1] >= 0 && sizrng
[1] <= pref
->sizrng
[1])
1840 pref
->sizrng
[1] = sizrng
[1];
1843 pref
->add_offset (orng
[0], orng
[1]);
1847 /* Given a COMPONENT_REF CREF, set *PREF size to the size of the referenced
1851 set_component_ref_size (tree cref
, access_ref
*pref
)
1853 const tree base
= TREE_OPERAND (cref
, 0);
1854 const tree base_type
= TREE_TYPE (base
);
1856 /* SAM is set for array members that might need special treatment. */
1857 special_array_member sam
;
1858 tree size
= component_ref_size (cref
, &sam
);
1859 if (sam
== special_array_member::int_0
)
1860 pref
->sizrng
[0] = pref
->sizrng
[1] = 0;
1861 else if (!pref
->trail1special
&& sam
== special_array_member::trail_1
)
1862 pref
->sizrng
[0] = pref
->sizrng
[1] = 1;
1863 else if (size
&& TREE_CODE (size
) == INTEGER_CST
)
1864 pref
->sizrng
[0] = pref
->sizrng
[1] = wi::to_offset (size
);
1867 /* When the size of the member is unknown it's either a flexible
1868 array member or a trailing special array member (either zero
1869 length or one-element). Set the size to the maximum minus
1870 the constant size of the base object's type. */
1871 pref
->sizrng
[0] = 0;
1872 pref
->sizrng
[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
1873 if (tree base_size
= TYPE_SIZE_UNIT (base_type
))
1874 if (TREE_CODE (base_size
) == INTEGER_CST
)
1875 pref
->sizrng
[1] -= wi::to_offset (base_size
);
1879 /* A helper of compute_objsize_r() to determine the size from COMPONENT_REF
1880 CREF. Return true on success and false on failure. */
1883 handle_component_ref (tree cref
, gimple
*stmt
, bool addr
, int ostype
,
1884 access_ref
*pref
, ssa_name_limit_t
&snlim
,
1887 gcc_assert (TREE_CODE (cref
) == COMPONENT_REF
);
1889 const tree base
= TREE_OPERAND (cref
, 0);
1890 const tree field
= TREE_OPERAND (cref
, 1);
1891 access_ref base_ref
= *pref
;
1893 /* Unconditionally determine the size of the base object (it could
1894 be smaller than the referenced member when the object is stored
1895 in a buffer with an insufficient size). */
1896 if (!compute_objsize_r (base
, stmt
, addr
, 0, &base_ref
, snlim
, qry
))
1899 /* Add the offset of the member to the offset into the object computed
1901 tree offset
= byte_position (field
);
1902 if (TREE_CODE (offset
) == INTEGER_CST
)
1903 base_ref
.add_offset (wi::to_offset (offset
));
1905 base_ref
.add_max_offset ();
1908 /* PREF->REF may have been already set to an SSA_NAME earlier
1909 to provide better context for diagnostics. In that case,
1910 leave it unchanged. */
1911 base_ref
.ref
= base
;
1913 const tree base_type
= TREE_TYPE (base
);
1914 if (TREE_CODE (base_type
) == UNION_TYPE
)
1915 /* In accesses through union types consider the entire unions
1916 rather than just their members. */
1921 /* In OSTYPE zero (for raw memory functions like memcpy), use
1922 the maximum size instead if the identity of the enclosing
1923 object cannot be determined. */
1930 if (!addr
&& POINTER_TYPE_P (TREE_TYPE (field
)))
1932 /* Set maximum size if the reference is to the pointer member
1933 itself (as opposed to what it points to). */
1934 pref
->set_max_size_range ();
1938 set_component_ref_size (cref
, pref
);
1940 if (base_ref
.size_remaining () < pref
->size_remaining ())
1941 /* Use the base object if it's smaller than the member. */
1947 /* A helper of compute_objsize_r() to determine the size from MEM_REF
1948 MREF. Return true on success and false on failure. */
1951 handle_mem_ref (tree mref
, gimple
*stmt
, int ostype
, access_ref
*pref
,
1952 ssa_name_limit_t
&snlim
, pointer_query
*qry
)
1954 gcc_assert (TREE_CODE (mref
) == MEM_REF
);
1956 tree mreftype
= TYPE_MAIN_VARIANT (TREE_TYPE (mref
));
1957 if (VECTOR_TYPE_P (mreftype
))
1959 /* Hack: Handle MEM_REFs of vector types as those to complete
1960 objects; those may be synthesized from multiple assignments
1961 to consecutive data members (see PR 93200 and 96963).
1962 FIXME: Vectorized assignments should only be present after
1963 vectorization so this hack is only necessary after it has
1964 run and could be avoided in calls from prior passes (e.g.,
1965 tree-ssa-strlen.cc).
1966 FIXME: Deal with this more generally, e.g., by marking up
1967 such MEM_REFs at the time they're created. */
1971 tree mrefop
= TREE_OPERAND (mref
, 0);
1972 if (!compute_objsize_r (mrefop
, stmt
, false, ostype
, pref
, snlim
, qry
))
1978 tree off
= pref
->eval (TREE_OPERAND (mref
, 1));
1979 range_query
*const rvals
= qry
? qry
->rvals
: NULL
;
1980 if (!get_offset_range (off
, stmt
, orng
, rvals
))
1982 /* Set ORNG to the maximum offset representable in ptrdiff_t. */
1983 orng
[1] = wi::to_offset (TYPE_MAX_VALUE (ptrdiff_type_node
));
1984 orng
[0] = -orng
[1] - 1;
1987 pref
->add_offset (orng
[0], orng
[1]);
1991 /* A helper of compute_objsize_r() to determine the size from SSA_NAME
1992 PTR. Return true on success and false on failure. */
1995 handle_ssa_name (tree ptr
, bool addr
, int ostype
,
1996 access_ref
*pref
, ssa_name_limit_t
&snlim
,
2002 /* Only process an SSA_NAME if the recursion limit has not yet
2006 if (++qry
->depth
> qry
->max_depth
)
2007 qry
->max_depth
= qry
->depth
;
2008 if (const access_ref
*cache_ref
= qry
->get_ref (ptr
, ostype
))
2010 /* Add the number of DEREFerences accummulated so far. */
2011 const int deref
= pref
->deref
;
2013 pref
->deref
+= deref
;
2018 gimple
*stmt
= SSA_NAME_DEF_STMT (ptr
);
2019 if (is_gimple_call (stmt
))
2021 /* If STMT is a call to an allocation function get the size
2022 from its argument(s). If successful, also set *PREF->REF
2023 to PTR for the caller to include in diagnostics. */
2025 range_query
*const rvals
= qry
? qry
->rvals
: NULL
;
2026 if (gimple_call_alloc_size (stmt
, wr
, rvals
))
2029 pref
->sizrng
[0] = offset_int::from (wr
[0], UNSIGNED
);
2030 pref
->sizrng
[1] = offset_int::from (wr
[1], UNSIGNED
);
2031 /* Constrain both bounds to a valid size. */
2032 offset_int maxsize
= wi::to_offset (max_object_size ());
2033 if (pref
->sizrng
[0] > maxsize
)
2034 pref
->sizrng
[0] = maxsize
;
2035 if (pref
->sizrng
[1] > maxsize
)
2036 pref
->sizrng
[1] = maxsize
;
2040 /* For functions known to return one of their pointer arguments
2041 try to determine what the returned pointer points to, and on
2042 success add OFFRNG which was set to the offset added by
2043 the function (e.g., memchr) to the overall offset. */
2045 offset_int offrng
[2];
2046 if (tree ret
= gimple_call_return_array (stmt
, offrng
, &past_end
,
2049 if (!compute_objsize_r (ret
, stmt
, addr
, ostype
, pref
, snlim
, qry
))
2052 /* Cap OFFRNG[1] to at most the remaining size of
2054 offset_int remrng
[2];
2055 remrng
[1] = pref
->size_remaining (remrng
);
2056 if (remrng
[1] != 0 && !past_end
)
2057 /* Decrement the size for functions that never return
2058 a past-the-end pointer. */
2061 if (remrng
[1] < offrng
[1])
2062 offrng
[1] = remrng
[1];
2063 pref
->add_offset (offrng
[0], offrng
[1]);
2067 /* For other calls that might return arbitrary pointers
2068 including into the middle of objects set the size
2069 range to maximum, clear PREF->BASE0, and also set
2070 PREF->REF to include in diagnostics. */
2071 pref
->set_max_size_range ();
2072 pref
->base0
= false;
2076 qry
->put_ref (ptr
, *pref
, ostype
);
2080 if (gimple_nop_p (stmt
))
2082 /* For a function argument try to determine the byte size
2083 of the array from the current function declaratation
2084 (e.g., attribute access or related). */
2086 bool static_array
= false;
2087 if (tree ref
= gimple_parm_array_size (ptr
, wr
, &static_array
))
2089 pref
->parmarray
= !static_array
;
2090 pref
->sizrng
[0] = offset_int::from (wr
[0], UNSIGNED
);
2091 pref
->sizrng
[1] = offset_int::from (wr
[1], UNSIGNED
);
2093 qry
->put_ref (ptr
, *pref
, ostype
);
2097 pref
->set_max_size_range ();
2098 pref
->base0
= false;
2100 qry
->put_ref (ptr
, *pref
, ostype
);
2104 if (gimple_code (stmt
) == GIMPLE_PHI
)
2106 /* Pass PTR to get_ref() via PREF. If all PHI arguments refer
2107 to the same object the function will replace it with it. */
2109 access_ref phi_ref
= *pref
;
2110 if (!pref
->get_ref (NULL
, &phi_ref
, ostype
, &snlim
, qry
))
2113 qry
->put_ref (ptr
, *pref
, ostype
);
2117 if (!is_gimple_assign (stmt
))
2119 /* Clear BASE0 since the assigned pointer might point into
2120 the middle of the object, set the maximum size range and,
2121 if the SSA_NAME refers to a function argumnent, set
2123 pref
->base0
= false;
2124 pref
->set_max_size_range ();
2129 tree_code code
= gimple_assign_rhs_code (stmt
);
2131 if (code
== MAX_EXPR
|| code
== MIN_EXPR
)
2133 if (!handle_min_max_size (ptr
, ostype
, pref
, snlim
, qry
))
2136 qry
->put_ref (ptr
, *pref
, ostype
);
2140 tree rhs
= gimple_assign_rhs1 (stmt
);
2142 if (code
== ASSERT_EXPR
)
2144 rhs
= TREE_OPERAND (rhs
, 0);
2145 return compute_objsize_r (rhs
, stmt
, addr
, ostype
, pref
, snlim
, qry
);
2148 if (code
== POINTER_PLUS_EXPR
2149 && TREE_CODE (TREE_TYPE (rhs
)) == POINTER_TYPE
)
2151 /* Compute the size of the object first. */
2152 if (!compute_objsize_r (rhs
, stmt
, addr
, ostype
, pref
, snlim
, qry
))
2156 tree off
= gimple_assign_rhs2 (stmt
);
2157 range_query
*const rvals
= qry
? qry
->rvals
: NULL
;
2158 if (get_offset_range (off
, stmt
, orng
, rvals
))
2159 pref
->add_offset (orng
[0], orng
[1]);
2161 pref
->add_max_offset ();
2163 qry
->put_ref (ptr
, *pref
, ostype
);
2167 if (code
== ADDR_EXPR
|| code
== SSA_NAME
)
2169 if (!compute_objsize_r (rhs
, stmt
, addr
, ostype
, pref
, snlim
, qry
))
2171 qry
->put_ref (ptr
, *pref
, ostype
);
2175 if (ostype
> 1 && POINTER_TYPE_P (TREE_TYPE (rhs
)))
2177 /* When determining the qualifiers follow the pointer but
2178 avoid caching the result. As the pointer is added to
2179 and/or dereferenced the computed size and offset need
2180 not be meaningful for other queries involving the same
2182 if (!compute_objsize_r (rhs
, stmt
, addr
, ostype
, pref
, snlim
, qry
))
2188 /* (This could also be an assignment from a nonlocal pointer.) Save
2189 PTR to mention in diagnostics but otherwise treat it as a pointer
2190 to an unknown object. */
2192 pref
->base0
= false;
2193 pref
->set_max_size_range ();
2197 /* Helper to compute the size of the object referenced by the PTR
2198 expression which must have pointer type, using Object Size type
2199 OSTYPE (only the least significant 2 bits are used).
2200 On success, sets PREF->REF to the DECL of the referenced object
2201 if it's unique, otherwise to null, PREF->OFFRNG to the range of
2202 offsets into it, and PREF->SIZRNG to the range of sizes of
2204 ADDR is true for an enclosing ADDR_EXPR.
2205 SNLIM is used to avoid visiting the same PHI operand multiple
2206 times, and, when nonnull, RVALS to determine range information.
2207 Returns true on success, false when a meaningful size (or range)
2208 cannot be determined.
2210 The function is intended for diagnostics and should not be used
2211 to influence code generation or optimization. */
2214 compute_objsize_r (tree ptr
, gimple
*stmt
, bool addr
, int ostype
,
2215 access_ref
*pref
, ssa_name_limit_t
&snlim
,
2221 return handle_decl (ptr
, addr
, pref
);
2223 switch (TREE_CODE (ptr
))
2227 tree ref
= TREE_OPERAND (ptr
, 0);
2228 if (!compute_objsize_r (ref
, stmt
, true, ostype
, pref
, snlim
, qry
))
2237 tree ref
= TREE_OPERAND (ptr
, 0);
2238 if (!compute_objsize_r (ref
, stmt
, addr
, ostype
, pref
, snlim
, qry
))
2241 offset_int off
= wi::to_offset (pref
->eval (TREE_OPERAND (ptr
, 2)));
2242 pref
->add_offset (off
/ BITS_PER_UNIT
);
2247 return handle_array_ref (ptr
, stmt
, addr
, ostype
, pref
, snlim
, qry
);
2250 return handle_component_ref (ptr
, stmt
, addr
, ostype
, pref
, snlim
, qry
);
2253 return handle_mem_ref (ptr
, stmt
, ostype
, pref
, snlim
, qry
);
2255 case TARGET_MEM_REF
:
2257 tree ref
= TREE_OPERAND (ptr
, 0);
2258 if (!compute_objsize_r (ref
, stmt
, addr
, ostype
, pref
, snlim
, qry
))
2261 /* TODO: Handle remaining operands. Until then, add maximum offset. */
2263 pref
->add_max_offset ();
2268 /* Pointer constants other than null smaller than param_min_pagesize
2269 might be the result of erroneous null pointer addition/subtraction.
2270 Unless zero is a valid address set size to zero. For null pointers,
2271 set size to the maximum for now since those may be the result of
2272 jump threading. Similarly, for values >= param_min_pagesize in
2273 order to support (type *) 0x7cdeab00. */
2274 if (integer_zerop (ptr
)
2275 || wi::to_widest (ptr
) >= param_min_pagesize
)
2276 pref
->set_max_size_range ();
2277 else if (POINTER_TYPE_P (TREE_TYPE (ptr
)))
2279 tree deref_type
= TREE_TYPE (TREE_TYPE (ptr
));
2280 addr_space_t as
= TYPE_ADDR_SPACE (deref_type
);
2281 if (targetm
.addr_space
.zero_address_valid (as
))
2282 pref
->set_max_size_range ();
2284 pref
->sizrng
[0] = pref
->sizrng
[1] = 0;
2287 pref
->sizrng
[0] = pref
->sizrng
[1] = 0;
2293 pref
->sizrng
[0] = pref
->sizrng
[1] = TREE_STRING_LENGTH (ptr
);
2297 case POINTER_PLUS_EXPR
:
2299 tree ref
= TREE_OPERAND (ptr
, 0);
2300 if (!compute_objsize_r (ref
, stmt
, addr
, ostype
, pref
, snlim
, qry
))
2303 /* The below only makes sense if the offset is being applied to the
2304 address of the object. */
2305 if (pref
->deref
!= -1)
2309 tree off
= pref
->eval (TREE_OPERAND (ptr
, 1));
2310 if (get_offset_range (off
, stmt
, orng
, qry
->rvals
))
2311 pref
->add_offset (orng
[0], orng
[1]);
2313 pref
->add_max_offset ();
2317 case VIEW_CONVERT_EXPR
:
2318 ptr
= TREE_OPERAND (ptr
, 0);
2319 return compute_objsize_r (ptr
, stmt
, addr
, ostype
, pref
, snlim
, qry
);
2322 return handle_ssa_name (ptr
, addr
, ostype
, pref
, snlim
, qry
);
2328 /* Assume all other expressions point into an unknown object
2329 of the maximum valid size. */
2331 pref
->base0
= false;
2332 pref
->set_max_size_range ();
2333 if (TREE_CODE (ptr
) == SSA_NAME
)
2334 qry
->put_ref (ptr
, *pref
);
2338 /* A "public" wrapper around the above. Clients should use this overload
2342 compute_objsize (tree ptr
, gimple
*stmt
, int ostype
, access_ref
*pref
,
2343 pointer_query
*ptr_qry
)
2351 /* Clear and invalidate in case *PREF is being reused. */
2352 pref
->offrng
[0] = pref
->offrng
[1] = 0;
2353 pref
->sizrng
[0] = pref
->sizrng
[1] = -1;
2355 ssa_name_limit_t snlim
;
2356 if (!compute_objsize_r (ptr
, stmt
, false, ostype
, pref
, snlim
, ptr_qry
))
2359 offset_int maxsize
= pref
->size_remaining ();
2360 if (pref
->base0
&& pref
->offrng
[0] < 0 && pref
->offrng
[1] >= 0)
2361 pref
->offrng
[0] = 0;
2362 return wide_int_to_tree (sizetype
, maxsize
);
2365 /* Transitional wrapper. The function should be removed once callers
2366 transition to the pointer_query API. */
2369 compute_objsize (tree ptr
, gimple
*stmt
, int ostype
, access_ref
*pref
,
2370 range_query
*rvals
/* = NULL */)
2374 return compute_objsize (ptr
, stmt
, ostype
, pref
, &qry
);
2377 /* Legacy wrapper around the above. The function should be removed
2378 once callers transition to one of the two above. */
2381 compute_objsize (tree ptr
, gimple
*stmt
, int ostype
, tree
*pdecl
/* = NULL */,
2382 tree
*poff
/* = NULL */, range_query
*rvals
/* = NULL */)
2384 /* Set the initial offsets to zero and size to negative to indicate
2385 none has been computed yet. */
2387 tree size
= compute_objsize (ptr
, stmt
, ostype
, &ref
, rvals
);
2388 if (!size
|| !ref
.base0
)
2395 *poff
= wide_int_to_tree (ptrdiff_type_node
, ref
.offrng
[ref
.offrng
[0] < 0]);
2400 /* Determine the offset *FLDOFF of the first byte of a struct member
2401 of TYPE (possibly recursively) into which the byte offset OFF points,
2402 starting after the field START_AFTER if it's non-null. On success,
2403 if nonnull, set *FLDOFF to the offset of the first byte, and return
2404 the field decl. If nonnull, set *NEXTOFF to the offset of the next
2405 field (which reflects any padding between the returned field and
2406 the next). Otherwise, if no such member can be found, return null. */
/* NOTE(review): this function is whitespace-mangled by extraction:
   statements are split across lines and several structural lines
   (braces, the do/while shell around the member advance, the
   `continue'/`return NULL_TREE' statements, and the lines originally
   numbered 2407-2408, 2412-2422, 2427, 2430-2434, 2437, 2440-2446,
   2450, 2454-2455, 2460-2468, 2474, 2478-2482, 2486, 2492-2495,
   2499-2503, 2507-2510, 2515-2518, 2521-2524) are missing.  Restore
   from upstream before compiling; the fragments are kept verbatim.  */
2409 field_at_offset (tree type
, tree start_after
, HOST_WIDE_INT off
,
2410 HOST_WIDE_INT
*fldoff
/* = nullptr */,
2411 HOST_WIDE_INT
*nextoff
/* = nullptr */)
/* FIRST_FLD heads TYPE's member chain; iteration starts here.  */
2413 tree first_fld
= TYPE_FIELDS (type
);
/* Local buffers substituted for FLDOFF/NEXTOFF when callers pass
   null, so the code below can store through them unconditionally.  */
2415 HOST_WIDE_INT offbuf
= 0, nextbuf
= 0;
2423 /* The field to return. */
2424 tree last_fld
= NULL_TREE
;
2425 /* The next field to advance to. */
2426 tree next_fld
= NULL_TREE
;
2428 /* NEXT_FLD's cached offset. */
2429 HOST_WIDE_INT next_pos
= -1;
2431 for (tree fld
= first_fld
; fld
; fld
= next_fld
)
2435 /* Advance to the next relevant data member. */
2436 next_fld
= TREE_CHAIN (next_fld
);
/* Loop condition: keep advancing past chain entries that are not
   FIELD_DECLs or that are compiler-generated (DECL_ARTIFICIAL).  */
2438 && (TREE_CODE (next_fld
) != FIELD_DECL
2439 || DECL_ARTIFICIAL (next_fld
)));
/* Skip non-fields and artificial fields; also skip START_AFTER
   itself so the search resumes past it.  */
2441 if (TREE_CODE (fld
) != FIELD_DECL
|| DECL_ARTIFICIAL (fld
))
2444 if (fld
== start_after
)
2447 tree fldtype
= TREE_TYPE (fld
);
2448 /* The offset of FLD within its immediately enclosing structure. */
2449 HOST_WIDE_INT fldpos
= next_pos
< 0 ? int_byte_position (fld
) : next_pos
;
2451 tree typesize
= TYPE_SIZE_UNIT (fldtype
);
2452 if (typesize
&& TREE_CODE (typesize
) != INTEGER_CST
)
2453 /* Bail if FLD is a variable length member. */
2456 /* If the size is not available the field is a flexible array
2457 member. Treat this case as success. */
2458 HOST_WIDE_INT fldsize
= (tree_fits_uhwi_p (typesize
)
2459 ? tree_to_uhwi (typesize
)
2462 /* If OFF is beyond the end of the current field continue. */
2463 HOST_WIDE_INT fldend
= fldpos
+ fldsize
;
2469 /* If OFF is equal to the offset of the next field continue
2470 to it and skip the array/struct business below. */
2471 tree pos
= byte_position (next_fld
);
2472 if (!tree_fits_shwi_p (pos
))
2473 /* Bail if NEXT_FLD is a variable length member. */
2475 next_pos
= tree_to_shwi (pos
);
2476 *nextoff
= *fldoff
+ next_pos
;
2477 if (*nextoff
== off
&& TREE_CODE (type
) != UNION_TYPE
)
/* NOTE(review): the `else' arm presumably sets the sentinel when
   there is no next field — confirm against upstream.  */
2481 *nextoff
= HOST_WIDE_INT_MAX
;
2483 /* OFF refers somewhere into the current field or just past its end,
2484 which could mean it refers to the next field. */
2485 if (TREE_CODE (fldtype
) == ARRAY_TYPE
)
2487 /* Will be set to the offset of the first byte of the array
2488 element (which may be an array) of FLDTYPE into which
2489 OFF - FLDPOS points (which may be past ELTOFF). */
2490 HOST_WIDE_INT eltoff
= 0;
2491 if (tree ft
= array_elt_at_offset (fldtype
, off
- fldpos
, &eltoff
))
2496 /* Advance the position to include the array element above.
2497 If OFF - FLPOS refers to a member of FLDTYPE, the member
2498 will be determined below. */
2504 if (TREE_CODE (fldtype
) == RECORD_TYPE
)
2505 /* Drill down into the current field if it's a struct. */
/* Recursive call threads FLDOFF/NEXTOFF through so offsets
   accumulate across nesting levels.  */
2506 fld
= field_at_offset (fldtype
, start_after
, off
- fldpos
,
2511 /* Unless the offset is just past the end of the field return it.
2512 Otherwise save it and return it only if the offset of the next
2513 next field is greater (i.e., there is padding between the two)
2514 or if there is no next field. */
/* Post-loop fixup: replace the sentinel with the cached position of
   the field following the one being returned.  */
2519 if (*nextoff
== HOST_WIDE_INT_MAX
&& next_fld
)
2520 *nextoff
= next_pos
;
2525 /* Determine the offset *ELTOFF of the first byte of the array element
2526 of array ARTYPE into which the byte offset OFF points. On success
2527 set *ELTOFF to the offset of the first byte and return type.
2528 Otherwise, if no such element can be found, return null. */
/* NOTE(review): whitespace-mangled extraction — statements are split
   across lines and the lines originally numbered 2529-2530, 2534,
   2536, 2538-2540, 2542, 2546, 2551, 2553, 2555-2559, 2562, 2564,
   2567-2571 (null-pointer defaulting of ELTOFF, the body of the
   `eltype == artype' branch, braces, and the final
   `return NULL_TREE;') are missing.  Restore from upstream before
   compiling; the fragments are kept verbatim.  */
2531 array_elt_at_offset (tree artype
, HOST_WIDE_INT off
,
2532 HOST_WIDE_INT
*eltoff
/* = nullptr */,
2533 HOST_WIDE_INT
*subar_size
/* = nullptr */)
/* Precondition enforced here: ARTYPE must be an ARRAY_TYPE.  */
2535 gcc_assert (TREE_CODE (artype
) == ARRAY_TYPE
);
/* DUMMY stands in for ELTOFF/SUBAR_SIZE when the caller passes
   null so the stores below are unconditional.  */
2537 HOST_WIDE_INT dummy
;
2541 subar_size
= &dummy
;
/* Strip nested array types to reach the innermost element type.  */
2543 tree eltype
= artype
;
2544 while (TREE_CODE (TREE_TYPE (eltype
)) == ARRAY_TYPE
)
2545 eltype
= TREE_TYPE (eltype
);
2547 tree subartype
= eltype
;
/* Descend one more level unless the innermost elements are plain
   char-mode scalars (byte arrays are handled as a unit).  */
2548 if (RECORD_OR_UNION_TYPE_P (TREE_TYPE (eltype
))
2549 || TYPE_MODE (TREE_TYPE (eltype
)) != TYPE_MODE (char_type_node
))
2550 eltype
= TREE_TYPE (eltype
);
2552 *subar_size
= int_size_in_bytes (subartype
);
2554 if (eltype
== artype
)
2560 HOST_WIDE_INT artype_size
= int_size_in_bytes (artype
);
2561 HOST_WIDE_INT eltype_size
= int_size_in_bytes (eltype
);
/* The trailing `// * eltype_size)' below is commented-out leftover
   code in the original — keep it as-is.  */
2563 if (off
< artype_size
)// * eltype_size)
/* Round OFF down to the start of the element containing it.  */
2565 *eltoff
= (off
/ eltype_size
) * eltype_size
;
2566 return TREE_CODE (eltype
) == ARRAY_TYPE
? TREE_TYPE (eltype
) : eltype
;
2572 /* Wrapper around build_array_type_nelts that makes sure the array
2573 can be created at all and handles zero sized arrays specially. */
/* NOTE(review): whitespace-mangled extraction — braces and the lines
   originally numbered 2574-2575, 2577, 2585, 2589-2590, 2593-2594
   are missing, and the function's tail (presumably
   `return arrtype;' and the closing brace) lies beyond the end of
   this chunk.  Restore from upstream before compiling; the fragments
   are kept verbatim.  */
2576 build_printable_array_type (tree eltype
, unsigned HOST_WIDE_INT nelts
)
/* If ELTYPE's size is a known nonzero constant that is not a
   multiple of its (greater-than-one) alignment — i.e. the low
   alignment bits of the size are nonzero — fall back to the main
   variant of the type, which presumably drops the extended
   alignment so the array can be laid out (TODO confirm).  */
2578 if (TYPE_SIZE_UNIT (eltype
)
2579 && TREE_CODE (TYPE_SIZE_UNIT (eltype
)) == INTEGER_CST
2580 && !integer_zerop (TYPE_SIZE_UNIT (eltype
))
2581 && TYPE_ALIGN_UNIT (eltype
) > 1
2582 && wi::zext (wi::to_wide (TYPE_SIZE_UNIT (eltype
)),
2583 ffs_hwi (TYPE_ALIGN_UNIT (eltype
)) - 1) != 0)
2584 eltype
= TYPE_MAIN_VARIANT (eltype
);
2586 /* Consider excessive NELTS an array of unknown bound. */
2587 tree idxtype
= NULL_TREE
;
2588 if (nelts
< HOST_WIDE_INT_MAX
)
/* Nonzero NELTS: the plain build_array_type_nelts result suffices.  */
2591 return build_array_type_nelts (eltype
, nelts
);
/* Zero-length case: build a [0] range so the array prints as T[0].  */
2592 idxtype
= build_range_type (sizetype
, size_zero_node
, NULL_TREE
);
2595 tree arrtype
= build_array_type (eltype
, idxtype
);
/* Use a distinct copy and force both size fields to zero so the
   zero-length array doesn't share (and corrupt) a cached variant.  */
2596 arrtype
= build_distinct_type_copy (TYPE_MAIN_VARIANT (arrtype
));
2597 TYPE_SIZE (arrtype
) = bitsize_zero_node
;
2598 TYPE_SIZE_UNIT (arrtype
) = size_zero_node
;