/*
 * sparse/smatch_ranges.c
 *
 * Copyright (C) 2009 Dan Carpenter.
 *
 * Licensed under the Open Software License version 1.1
 *
 */

#include "parse.h"
#include "smatch.h"
#include "smatch_extra.h"
#include "smatch_slist.h"

ALLOCATOR(data_info, "smatch extra data");
ALLOCATOR(data_range, "data range");
__DO_ALLOCATOR(struct data_range, sizeof(struct data_range), __alignof__(struct data_range),
	       "permanent ranges", perm_data_range);

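/*
 * Example (illustrative, not from the original source): a list holding
 * the ranges [1,5] and [7,7] is printed by show_rl() as "1-5,7".
 * Single-value ranges print as one number, wider ranges as "min-max",
 * and entries are separated by commas.
 */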
char *show_rl(struct range_list *list)
{
	struct data_range *tmp;
	char full[256];
	int i = 0;

	full[0] = '\0';
	FOR_EACH_PTR(list, tmp) {
		if (i++)
			strncat(full, ",", 254 - strlen(full));
		if (sval_cmp(tmp->min, tmp->max) == 0) {
			strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
			continue;
		}
		strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
		strncat(full, "-", 254 - strlen(full));
		strncat(full, sval_to_str(tmp->max), 254 - strlen(full));
	} END_FOR_EACH_PTR(tmp);
	return alloc_sname(full);
}

static sval_t get_val_from_key(int use_max, struct symbol *type, char *c, struct expression *call, char **endp)
{
	struct expression *arg;
	sval_t ret, tmp;
	int param;

	if (use_max)
		ret = sval_type_max(type);
	else
		ret = sval_type_min(type);

	c++; /* skip the '<' character */
	param = strtoll(c, &c, 10);
	c++; /* skip the '>' character */

	if (!call)
		goto out;
	arg = get_argument_from_call_expr(call->args, param);
	if (!arg)
		goto out;

	if (use_max && get_implied_max(arg, &tmp))
		ret = tmp;
	if (!use_max && get_implied_min(arg, &tmp))
		ret = tmp;
out:
	*endp = c;
	return ret;
}

static sval_t parse_val(int use_max, struct expression *call, struct symbol *type, char *c, char **endp)
{
	char *start = c;
	sval_t ret;

	if (!strncmp(start, "max", 3)) {
		ret = sval_type_max(type);
		c += 3;
	} else if (!strncmp(start, "u64max", 6)) {
		ret = sval_type_val(type, ULLONG_MAX);
		c += 6;
	} else if (!strncmp(start, "s64max", 6)) {
		ret = sval_type_val(type, LLONG_MAX);
		c += 6;
	} else if (!strncmp(start, "u32max", 6)) {
		ret = sval_type_val(type, UINT_MAX);
		c += 6;
	} else if (!strncmp(start, "s32max", 6)) {
		ret = sval_type_val(type, INT_MAX);
		c += 6;
	} else if (!strncmp(start, "u16max", 6)) {
		ret = sval_type_val(type, USHRT_MAX);
		c += 6;
	} else if (!strncmp(start, "s16max", 6)) {
		ret = sval_type_val(type, SHRT_MAX);
		c += 6;
	} else if (!strncmp(start, "min", 3)) {
		ret = sval_type_min(type);
		c += 3;
	} else if (!strncmp(start, "s64min", 6)) {
		ret = sval_type_val(type, LLONG_MIN);
		c += 6;
	} else if (!strncmp(start, "s32min", 6)) {
		ret = sval_type_val(type, INT_MIN);
		c += 6;
	} else if (!strncmp(start, "s16min", 6)) {
		ret = sval_type_val(type, SHRT_MIN);
		c += 6;
	} else if (start[0] == '<') {
		ret = get_val_from_key(1, type, start, call, &c);
	} else {
		ret = sval_type_val(type, strtoll(start, &c, 10));
	}
	*endp = c;
	return ret;
}

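/*
 * The helper below parses a comma separated list of values and ranges.
 * Illustrative sketch of a caller (assuming "type" points at an int
 * type symbol):
 *
 *	struct range_list *rl = NULL;
 *	str_to_rl(type, "0,4-max", &rl);
 *	// rl now holds [0,0] and [4, INT_MAX]; show_rl() -> "0,4-max"
 */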
static void str_to_rl_helper(struct expression *call, struct symbol *type, char *value, struct range_list **rl)
{
	sval_t min, max;
	char *c;

	if (!type)
		type = &llong_ctype;
	*rl = NULL;

	if (value && strcmp(value, "empty") == 0)
		return;

	c = value;
	while (*c) {
		min = parse_val(0, call, type, c, &c);
		if (!*c) {
			add_range(rl, min, min);
			break;
		}
		if (*c == ',') {
			add_range(rl, min, min);
			c++;
			continue;
		}
		if (*c != '-') {
			sm_msg("debug XXX: trouble parsing %s ", value);
			break;
		}
		c++; /* skip the '-' */
		max = parse_val(1, call, type, c, &c);
		add_range(rl, min, max);
		if (!*c)
			break;
		if (*c != ',') {
			sm_msg("debug YYY: trouble parsing %s %s", value, c);
			break;
		}
		c++;
	}

	*rl = cast_rl(type, *rl);
}

void str_to_rl(struct symbol *type, char *value, struct range_list **rl)
{
	return str_to_rl_helper(NULL, type, value, rl);
}

void call_results_to_rl(struct expression *expr, struct symbol *type, char *value, struct range_list **rl)
{
	return str_to_rl_helper(strip_expr(expr), type, value, rl);
}

int is_whole_rl(struct range_list *rl)
{
	struct data_range *drange;

	if (ptr_list_empty(rl))
		return 0;
	drange = first_ptr_list((struct ptr_list *)rl);
	if (sval_is_min(drange->min) && sval_is_max(drange->max))
		return 1;
	return 0;
}

sval_t rl_min(struct range_list *rl)
{
	struct data_range *drange;
	sval_t ret;

	ret.type = &llong_ctype;
	ret.value = LLONG_MIN;
	if (ptr_list_empty(rl))
		return ret;
	drange = first_ptr_list((struct ptr_list *)rl);
	return drange->min;
}

sval_t rl_max(struct range_list *rl)
{
	struct data_range *drange;
	sval_t ret;

	ret.type = &llong_ctype;
	ret.value = LLONG_MAX;
	if (ptr_list_empty(rl))
		return ret;
	drange = last_ptr_list((struct ptr_list *)rl);
	return drange->max;
}

int rl_to_sval(struct range_list *rl, sval_t *sval)
{
	sval_t min, max;

	if (!rl)
		return 0;

	min = rl_min(rl);
	max = rl_max(rl);
	if (sval_cmp(min, max) != 0)
		return 0;
	*sval = min;
	return 1;
}

struct symbol *rl_type(struct range_list *rl)
{
	if (!rl)
		return NULL;
	return rl_min(rl).type;
}

static struct data_range *alloc_range_helper_sval(sval_t min, sval_t max, int perm)
{
	struct data_range *ret;

	if (perm)
		ret = __alloc_perm_data_range(0);
	else
		ret = __alloc_data_range(0);
	ret->min = min;
	ret->max = max;
	return ret;
}

struct data_range *alloc_range(sval_t min, sval_t max)
{
	return alloc_range_helper_sval(min, max, 0);
}

struct data_range *alloc_range_perm(sval_t min, sval_t max)
{
	return alloc_range_helper_sval(min, max, 1);
}

struct range_list *alloc_rl(sval_t min, sval_t max)
{
	struct range_list *rl = NULL;

	if (sval_cmp(min, max) > 0)
		return alloc_whole_rl(min.type);

	add_range(&rl, min, max);
	return rl;
}

struct range_list *alloc_whole_rl(struct symbol *type)
{
	if (!type || type_positive_bits(type) < 0)
		type = &llong_ctype;

	return alloc_rl(sval_type_min(type), sval_type_max(type));
}

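/*
 * Worked example (illustrative): starting from an empty list,
 * add_range(&rl, 1, 5) gives "1-5"; add_range(&rl, 10, 20) gives
 * "1-5,10-20"; add_range(&rl, 6, 9) then joins everything into "1-20"
 * because 5 + 1 == 6 and 9 + 1 == 10.
 */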
void add_range(struct range_list **list, sval_t min, sval_t max)
{
	struct data_range *tmp = NULL;
	struct data_range *new = NULL;
	int check_next = 0;

	/*
	 * FIXME: This has a problem merging a range_list like: min-0,3-max
	 * with a range like 1-2.  You end up with min-2,3-max instead of
	 * whole_range.
	 */
	FOR_EACH_PTR(*list, tmp) {
		if (check_next) {
			/* Sometimes we overlap with more than one range
			   so we have to delete or modify the next range. */
			if (max.value + 1 == tmp->min.value) {
				/* join 2 ranges here */
				new->max = tmp->max;
				DELETE_CURRENT_PTR(tmp);
				return;
			}

			/* Doesn't overlap with the next one. */
			if (sval_cmp(max, tmp->min) < 0)
				return;
			/* Partially overlaps with the next one. */
			if (sval_cmp(max, tmp->max) < 0) {
				tmp->min.value = max.value + 1;
				return;
			}
			/* Completely overlaps with the next one. */
			if (sval_cmp(max, tmp->max) >= 0) {
				DELETE_CURRENT_PTR(tmp);
				/* there could be more ranges to delete */
				continue;
			}
		}
		if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(min, tmp->max);
			REPLACE_CURRENT_PTR(tmp, new);
			return;
		}
		if (sval_cmp(max, tmp->min) < 0) {	/* new range entirely below */
			new = alloc_range(min, max);
			INSERT_CURRENT(new, tmp);
			return;
		}
		if (sval_cmp(min, tmp->min) < 0) {	/* new range partially below */
			if (sval_cmp(max, tmp->max) < 0)
				max = tmp->max;
			else
				check_next = 1;
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			if (!check_next)
				return;
			continue;
		}
		if (sval_cmp(max, tmp->max) <= 0)	/* new range already included */
			return;
		if (sval_cmp(min, tmp->max) <= 0) {	/* new range partially above */
			min = tmp->min;
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			check_next = 1;
			continue;
		}
		if (!sval_is_min(min) && min.value - 1 == tmp->max.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(tmp->min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			check_next = 1;
			continue;
		}
		/* the new range is entirely above the existing ranges */
	} END_FOR_EACH_PTR(tmp);
	if (check_next)
		return;
	new = alloc_range(min, max);
	add_ptr_list(list, new);
}

struct range_list *clone_rl(struct range_list *list)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		add_ptr_list(&ret, tmp);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

struct range_list *clone_rl_permanent(struct range_list *list)
{
	struct data_range *tmp;
	struct data_range *new;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		new = alloc_range_perm(tmp->min, tmp->max);
		add_ptr_list(&ret, new);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

struct range_list *rl_union(struct range_list *one, struct range_list *two)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(one, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
	FOR_EACH_PTR(two, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

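/*
 * Example (illustrative): remove_range() over "1-10" with min = 3 and
 * max = 5 returns "1-2,6-10".  Ranges that do not touch [min,max] are
 * copied through unchanged; overlapping ranges are trimmed or split.
 */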
struct range_list *remove_range(struct range_list *list, sval_t min, sval_t max)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		if (sval_cmp(tmp->max, min) < 0) {
			add_range(&ret, tmp->min, tmp->max);
			continue;
		}
		if (sval_cmp(tmp->min, max) > 0) {
			add_range(&ret, tmp->min, tmp->max);
			continue;
		}
		/* Completely covered by the range to remove. */
		if (sval_cmp(tmp->min, min) >= 0 && sval_cmp(tmp->max, max) <= 0)
			continue;
		if (sval_cmp(tmp->min, min) >= 0) {
			max.value++;
			add_range(&ret, max, tmp->max);
		} else if (sval_cmp(tmp->max, max) <= 0) {
			min.value--;
			add_range(&ret, tmp->min, min);
		} else {
			min.value--;
			max.value++;
			add_range(&ret, tmp->min, min);
			add_range(&ret, max, tmp->max);
		}
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

int ranges_equiv(struct data_range *one, struct data_range *two)
{
	if (!one && !two)
		return 1;
	if (!one || !two)
		return 0;
	if (sval_cmp(one->min, two->min) != 0)
		return 0;
	if (sval_cmp(one->max, two->max) != 0)
		return 0;
	return 1;
}

int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		if (!one_range && !two_range)
			return 1;
		if (!ranges_equiv(one_range, two_range))
			return 0;
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);

	return 1;
}

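/*
 * The comparisons below are "possibly" comparisons over single ranges:
 * a condition counts as true if some value in the left range can
 * satisfy it against some value in the right range.  Illustrative
 * example: for left = [1,5] and right = [3,10], '<' is possibly true
 * because left->min (1) < right->max (10).
 */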
int true_comparison_range(struct data_range *left, int comparison, struct data_range *right)
{
	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->min, right->max) < 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_LTE:
	case SPECIAL_LTE:
		if (sval_cmp(left->min, right->max) <= 0)
			return 1;
		return 0;
	case SPECIAL_EQUAL:
		if (sval_cmp(left->max, right->min) < 0)
			return 0;
		if (sval_cmp(left->min, right->max) > 0)
			return 0;
		return 1;
	case SPECIAL_UNSIGNED_GTE:
	case SPECIAL_GTE:
		if (sval_cmp(left->max, right->min) >= 0)
			return 1;
		return 0;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->max, right->min) > 0)
			return 1;
		return 0;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(left->min, left->max) != 0)
			return 1;
		if (sval_cmp(right->min, right->max) != 0)
			return 1;
		if (sval_cmp(left->min, right->min) != 0)
			return 1;
		return 0;
	default:
		sm_msg("unhandled comparison %d\n", comparison);
	}
	return 0;
}

int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}

static int false_comparison_range_sval(struct data_range *left, int comparison, struct data_range *right)
{
	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->max, right->min) >= 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_LTE:
	case SPECIAL_LTE:
		if (sval_cmp(left->max, right->min) > 0)
			return 1;
		return 0;
	case SPECIAL_EQUAL:
		if (sval_cmp(left->min, left->max) != 0)
			return 1;
		if (sval_cmp(right->min, right->max) != 0)
			return 1;
		if (sval_cmp(left->min, right->min) != 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_GTE:
	case SPECIAL_GTE:
		if (sval_cmp(left->min, right->max) < 0)
			return 1;
		return 0;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->min, right->max) <= 0)
			return 1;
		return 0;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(left->max, right->min) < 0)
			return 0;
		if (sval_cmp(left->min, right->max) > 0)
			return 0;
		return 1;
	default:
		sm_msg("unhandled comparison %d\n", comparison);
	}
	return 0;
}

int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}

int possibly_true(struct expression *left, int comparison, struct expression *right)
{
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;

	if (!get_implied_rl(left, &rl_left))
		return 1;
	if (!get_implied_rl(right, &rl_right))
		return 1;

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (true_comparison_range(tmp_left, comparison, tmp_right))
				return 1;
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
	return 0;
}

int possibly_false(struct expression *left, int comparison, struct expression *right)
{
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;

	if (!get_implied_rl(left, &rl_left))
		return 1;
	if (!get_implied_rl(right, &rl_right))
		return 1;

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (false_comparison_range_sval(tmp_left, comparison, tmp_right))
				return 1;
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
	return 0;
}

int possibly_true_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
	struct data_range *left_tmp, *right_tmp;

	if (!left_ranges || !right_ranges)
		return 1;

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (true_comparison_range(left_tmp, comparison, right_tmp))
				return 1;
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
	return 0;
}

int possibly_false_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
	struct data_range *left_tmp, *right_tmp;

	if (!left_ranges || !right_ranges)
		return 1;

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (false_comparison_range_sval(left_tmp, comparison, right_tmp))
				return 1;
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
	return 0;
}

/* FIXME: the _rl here stands for right left so really it should be _lr */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}

int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}

void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}

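/*
 * The rl_stack helpers below implement a simple stack of range lists
 * (used, e.g., while evaluating nested expressions).  Illustrative use:
 *
 *	push_rl(&stack, rl);
 *	...
 *	rl = pop_rl(&stack);
 */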
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}

struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}

struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}

void filter_top_rl(struct range_list_stack **rl_stack, sval_t sval)
{
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = remove_range(rl, sval, sval);
	push_rl(rl_stack, rl);
}

static int sval_too_big(struct symbol *type, sval_t sval)
{
	if (type_bits(type) == 64)
		return 0;
	if (sval.uvalue > ((1ULL << type_bits(type)) - 1))
		return 1;
	return 0;
}

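/*
 * Illustrative example of the wrap-around handling below: adding the
 * range [-3,4] to an unsigned int list splits it into [0,4] plus
 * [UINT_MAX - 2, UINT_MAX], because the negative values wrap to high
 * positive values after the cast.
 */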
static void add_range_t(struct symbol *type, struct range_list **rl, sval_t min, sval_t max)
{
	/* If we're just adding a number, cast it and add it */
	if (sval_cmp(min, max) == 0) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));
		return;
	}

	/* If the range is within the type range then add it */
	if (sval_fits(type, min) && sval_fits(type, max)) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));
		return;
	}

	/*
	 * If the range we are adding has more bits than the range type then
	 * add the whole range type.  Eg:
	 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
	 * This isn't totally the right thing to do.  We could be more granular.
	 */
	if (sval_too_big(type, min) || sval_too_big(type, max)) {
		add_range(rl, sval_type_min(type), sval_type_max(type));
		return;
	}

	/* Cast negative values to high positive values */
	if (sval_is_negative(min) && type_unsigned(type)) {
		if (sval_is_positive(max)) {
			if (sval_too_high(type, max)) {
				add_range(rl, sval_type_min(type), sval_type_max(type));
				return;
			}
			add_range(rl, sval_type_val(type, 0), sval_cast(type, max));
			max = sval_type_max(type);
		} else {
			max = sval_cast(type, max);
		}
		min = sval_cast(type, min);
		add_range(rl, min, max);
		return;
	}

	/* Cast high positive numbers to negative */
	if (sval_unsigned(max) && sval_is_negative(sval_cast(type, max))) {
		if (!sval_is_negative(sval_cast(type, min))) {
			add_range(rl, sval_cast(type, min), sval_type_max(type));
			min = sval_type_min(type);
		} else {
			min = sval_cast(type, min);
		}
		max = sval_cast(type, max);
		add_range(rl, min, max);
	}
}

struct range_list *rl_truncate_cast(struct symbol *type, struct range_list *rl)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;
	sval_t min, max;

	if (!rl)
		return NULL;

	if (!type || type == rl_type(rl))
		return rl;

	FOR_EACH_PTR(rl, tmp) {
		min = tmp->min;
		max = tmp->max;
		if (type_bits(type) < type_bits(rl_type(rl))) {
			min.uvalue = tmp->min.uvalue & ((1ULL << type_bits(type)) - 1);
			max.uvalue = tmp->max.uvalue & ((1ULL << type_bits(type)) - 1);
		}
		if (sval_cmp(min, max) > 0) {
			min = sval_cast(type, min);
			max = sval_cast(type, max);
		}
		add_range_t(type, &ret, min, max);
	} END_FOR_EACH_PTR(tmp);

	return ret;
}

static int rl_is_sane(struct range_list *rl)
{
	struct data_range *tmp;
	struct symbol *type;

	type = rl_type(rl);
	FOR_EACH_PTR(rl, tmp) {
		if (!sval_fits(type, tmp->min))
			return 0;
		if (!sval_fits(type, tmp->max))
			return 0;
		if (sval_cmp(tmp->min, tmp->max) > 0)
			return 0;
	} END_FOR_EACH_PTR(tmp);

	return 1;
}

static int rl_type_consistent(struct range_list *rl)
{
	struct data_range *tmp;
	struct symbol *type;

	type = rl_type(rl);
	FOR_EACH_PTR(rl, tmp) {
		if (type != tmp->min.type || type != tmp->max.type)
			return 0;
	} END_FOR_EACH_PTR(tmp);
	return 1;
}

struct range_list *cast_rl(struct symbol *type, struct range_list *rl)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	if (!rl)
		return NULL;

	if (!type)
		return rl;
	if (!rl_is_sane(rl))
		return alloc_whole_rl(type);
	if (type == rl_type(rl) && rl_type_consistent(rl))
		return rl;

	FOR_EACH_PTR(rl, tmp) {
		add_range_t(type, &ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	if (!ret)
		return alloc_whole_rl(type);

	return ret;
}

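/*
 * Example (illustrative): inverting "0-10" over a signed 32 bit type
 * yields "s32min-(-1),11-s32max", i.e. every value of the type that the
 * original list does not cover.
 */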
struct range_list *rl_invert(struct range_list *orig)
{
	struct range_list *ret = NULL;
	struct data_range *tmp;
	sval_t gap_min, abs_max, sval;

	if (!orig)
		return NULL;

	gap_min = sval_type_min(rl_min(orig).type);
	abs_max = sval_type_max(rl_max(orig).type);

	FOR_EACH_PTR(orig, tmp) {
		if (sval_cmp(tmp->min, gap_min) > 0) {
			sval = sval_type_val(tmp->min.type, tmp->min.value - 1);
			add_range(&ret, gap_min, sval);
		}
		gap_min = sval_type_val(tmp->max.type, tmp->max.value + 1);
		if (sval_cmp(tmp->max, abs_max) == 0)
			gap_min = abs_max;
	} END_FOR_EACH_PTR(tmp);

	if (sval_cmp(gap_min, abs_max) < 0)
		add_range(&ret, gap_min, abs_max);

	return ret;
}

struct range_list *rl_filter(struct range_list *rl, struct range_list *filter)
{
	struct data_range *tmp;

	FOR_EACH_PTR(filter, tmp) {
		rl = remove_range(rl, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	return rl;
}

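/*
 * Design note: intersection is implemented by complement, i.e.
 * "one intersect two" equals "one minus invert(two)", so
 * rl_intersection() reuses rl_invert() and rl_filter() instead of
 * walking both lists directly.
 */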
struct range_list *rl_intersection(struct range_list *one, struct range_list *two)
{
	if (!two)
		return NULL;
	two = rl_invert(two);
	return rl_filter(one, two);
}

void free_rl(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}

static void free_single_dinfo(struct data_info *dinfo)
{
	free_rl(&dinfo->value_ranges);
}

static void free_dinfos(struct allocation_blob *blob)
{
	unsigned int size = sizeof(struct data_info);
	unsigned int offset = 0;

	while (offset < blob->offset) {
		free_single_dinfo((struct data_info *)(blob->data + offset));
		offset += size;
	}
}

void free_data_info_allocs(void)
{
	struct allocator_struct *desc = &data_info_allocator;
	struct allocation_blob *blob = desc->blobs;

	desc->blobs = NULL;
	desc->allocations = 0;
	desc->total_bytes = 0;
	desc->useful_bytes = 0;
	desc->freelist = NULL;
	while (blob) {
		struct allocation_blob *next = blob->next;
		free_dinfos(blob);
		blob_free(blob, desc->chunking);
		blob = next;
	}
	clear_data_range_alloc();
}