/*
 * sparse/smatch_ranges.c
 *
 * Copyright (C) 2009 Dan Carpenter.
 *
 * Licensed under the Open Software License version 1.1
 */
12 #include "smatch_extra.h"
13 #include "smatch_slist.h"
15 ALLOCATOR(data_info
, "smatch extra data");
16 ALLOCATOR(data_range
, "data range");
17 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
18 "permanent ranges", perm_data_range
);
/*
 * show_rl() - format a range_list as a string: each data_range prints
 * as a single value when min == max, otherwise as "min-max", with
 * entries comma separated; the result comes from alloc_sname().
 * NOTE(review): this fragment has gaps (the fused original numbering
 * skips lines), so the "full" buffer declaration, the first-entry
 * separator logic and some braces are not visible here.
 */
20 char *show_rl(struct range_list
*list
)
22 struct data_range
*tmp
;
28 FOR_EACH_PTR(list
, tmp
) {
/* comma separator; bound "254 - strlen(full)" leaves room for the
 * terminating NUL -- TODO confirm the buffer is 256 bytes */
30 strncat(full
, ",", 254 - strlen(full
));
/* single-value range: print only the value */
31 if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
32 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
/* true range: print "min-max" */
35 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
36 strncat(full
, "-", 254 - strlen(full
));
37 strncat(full
, sval_to_str(tmp
->max
), 254 - strlen(full
));
38 } END_FOR_EACH_PTR(tmp
);
39 return alloc_sname(full
);
/*
 * str_to_comparison_arg() - parse a comparison spec such as "<=",
 * "==", ">=" followed by a parameter number, and resolve that number
 * to the matching argument of @call.  Returns through *comparison,
 * *arg and (presumably) *endp; the int return looks like a
 * success/failure flag -- TODO confirm against callers.
 * NOTE(review): fragment with missing lines (gaps in fused numbering);
 * the '<' branch, error paths and braces are not visible.
 */
42 int str_to_comparison_arg(const char *str
, struct expression
*call
, int *comparison
, struct expression
**arg
, char **endp
)
45 char *c
= (char *)str
;
/* visible branches: "<=" handled above this gap, then "=", then ">" */
54 *comparison
= SPECIAL_LTE
;
59 } else if (*c
== '=') {
62 *comparison
= SPECIAL_EQUAL
;
63 } else if (*c
== '>') {
66 *comparison
= SPECIAL_GTE
;
/* parse the parameter index, then step past the closing ']' */
79 param
= strtoll(c
, &c
, 10);
80 c
++; /* skip the ']' character */
86 *arg
= get_argument_from_call_expr(call
->args
, param
);
/*
 * get_val_from_key() - compute a bound implied by a "[<p0]"-style key:
 * start from the type's max (use_max) or min (!use_max), then tighten
 * it using the implied range of the referenced call argument.
 * NOTE(review): fragment with missing lines; declarations of ret,
 * comparison and tmp, plus several branches, are not visible.
 */
93 static sval_t
get_val_from_key(int use_max
, struct symbol
*type
, char *c
, struct expression
*call
, char **endp
)
95 struct expression
*arg
;
/* default bound: whole-type max or min depending on use_max */
100 ret
= sval_type_max(type
);
102 ret
= sval_type_min(type
);
104 if (!str_to_comparison_arg(c
, call
, &comparison
, &arg
, endp
))
/* upper bound: strict '<' means max is (arg_max - 1) */
107 if (use_max
&& get_implied_max(arg
, &tmp
)) {
109 if (comparison
== '<') {
111 ret
= sval_binop(ret
, '-', tmp
);
/* lower bound: strict '>' means min is (arg_min + 1) */
114 if (!use_max
&& get_implied_min(arg
, &tmp
)) {
116 if (comparison
== '>') {
118 ret
= sval_binop(ret
, '+', tmp
);
/*
 * parse_val() - parse one endpoint of a textual range: named limits
 * ("max", "u64max", "s16min", ...), a "[...]" comparison key, or a
 * plain decimal number via strtoll().  Matching uses strncmp with
 * fixed lengths, so e.g. "maxfoo" also matches "max".
 * NOTE(review): fragment with missing lines; the declarations of
 * start/ret, endp handling and the final return are not visible.
 */
125 static sval_t
parse_val(int use_max
, struct expression
*call
, struct symbol
*type
, char *c
, char **endp
)
130 if (!strncmp(start
, "max", 3)) {
131 ret
= sval_type_max(type
);
133 } else if (!strncmp(start
, "u64max", 6)) {
134 ret
= sval_type_val(type
, ULLONG_MAX
);
136 } else if (!strncmp(start
, "s64max", 6)) {
137 ret
= sval_type_val(type
, LLONG_MAX
);
139 } else if (!strncmp(start
, "u32max", 6)) {
140 ret
= sval_type_val(type
, UINT_MAX
);
142 } else if (!strncmp(start
, "s32max", 6)) {
143 ret
= sval_type_val(type
, INT_MAX
);
145 } else if (!strncmp(start
, "u16max", 6)) {
146 ret
= sval_type_val(type
, USHRT_MAX
);
148 } else if (!strncmp(start
, "s16max", 6)) {
149 ret
= sval_type_val(type
, SHRT_MAX
);
151 } else if (!strncmp(start
, "min", 3)) {
152 ret
= sval_type_min(type
);
154 } else if (!strncmp(start
, "s64min", 6)) {
155 ret
= sval_type_val(type
, LLONG_MIN
);
157 } else if (!strncmp(start
, "s32min", 6)) {
158 ret
= sval_type_val(type
, INT_MIN
);
160 } else if (!strncmp(start
, "s16min", 6)) {
161 ret
= sval_type_val(type
, SHRT_MIN
);
/* '[' introduces a comparison key; use_max is hard-coded to 1 here --
 * NOTE(review): looks inconsistent with the use_max parameter, but the
 * surrounding (missing) lines may justify it; verify upstream. */
163 } else if (start
[0] == '[') {
164 ret
= get_val_from_key(1, type
, start
, call
, &c
);
166 ret
= sval_type_val(type
, strtoll(start
, &c
, 10));
/*
 * str_to_rl_helper() - parse a textual range description ("empty",
 * "[==p..." comparison keys, "min", "min-max", comma-separated lists)
 * into *rl, then cast the result to @type.  @call may be NULL when no
 * call expression is available (see str_to_rl()).
 * NOTE(review): fragment with missing lines; loop structure, the c
 * cursor declaration and several early returns are not visible.
 */
172 static void str_to_rl_helper(struct expression
*call
, struct symbol
*type
, char *value
, struct range_list
**rl
)
/* "empty" means the empty range list */
181 if (value
&& strcmp(value
, "empty") == 0)
/* "[==p..." : the range equals the implied range of a call argument */
184 if (strncmp(value
, "[==p", 4) == 0) {
185 struct expression
*arg
;
188 if (!str_to_comparison_arg(value
, call
, &comparison
, &arg
, NULL
))
190 get_implied_rl(arg
, rl
);
198 min
= parse_val(0, call
, type
, c
, &c
);
/* single value (no '-'): add [min, min] */
202 add_range(rl
, min
, min
);
206 add_range(rl
, min
, min
);
/* parse failure diagnostics (debug output) */
211 sm_msg("debug XXX: trouble parsing %s ", value
);
217 max
= parse_val(1, call
, type
, c
, &c
);
218 add_range(rl
, min
, max
);
224 sm_msg("debug YYY: trouble parsing %s %s", value
, c
);
/* normalize everything to the requested type */
231 *rl
= cast_rl(type
, *rl
);
234 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
236 return str_to_rl_helper(NULL
, type
, value
, rl
);
/*
 * call_results_to_rl() - parse a textual range description in the
 * context of a call expression, so comparison keys that reference the
 * call's arguments can be resolved.  The expression is stripped of
 * casts/parens via strip_expr() before use.
 *
 * Fix: the original wrote "return str_to_rl_helper(...);" — returning
 * an expression from a void function is an ISO C constraint violation
 * (C11 6.8.6.4), even though GCC accepts it as an extension.
 */
void call_results_to_rl(struct expression *expr, struct symbol *type, char *value, struct range_list **rl)
{
	str_to_rl_helper(strip_expr(expr), type, value, rl);
}
/*
 * is_whole_rl() - test whether @rl's first range spans the entire
 * type (type-min through type-max).  Empty lists are handled first.
 * NOTE(review): fragment with missing lines; the return statements
 * and braces are not visible.
 */
244 int is_whole_rl(struct range_list
*rl
)
246 struct data_range
*drange
;
248 if (ptr_list_empty(rl
))
250 drange
= first_ptr_list((struct ptr_list
*)rl
);
251 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
/*
 * rl_min() - smallest value in @rl.  Defaults to LLONG_MIN with type
 * llong_ctype when the list is empty; otherwise taken from the first
 * entry (the list is kept sorted by add_range()).
 * NOTE(review): fragment with missing lines; the ret declaration and
 * return statements are not visible.
 */
256 sval_t
rl_min(struct range_list
*rl
)
258 struct data_range
*drange
;
261 ret
.type
= &llong_ctype
;
262 ret
.value
= LLONG_MIN
;
263 if (ptr_list_empty(rl
))
265 drange
= first_ptr_list((struct ptr_list
*)rl
);
/*
 * rl_max() - largest value in @rl.  Defaults to LLONG_MAX with type
 * llong_ctype when the list is empty; otherwise taken from the last
 * entry (the list is kept sorted by add_range()).
 * NOTE(review): fragment with missing lines; the ret declaration and
 * return statements are not visible.
 */
269 sval_t
rl_max(struct range_list
*rl
)
271 struct data_range
*drange
;
274 ret
.type
= &llong_ctype
;
275 ret
.value
= LLONG_MAX
;
276 if (ptr_list_empty(rl
))
278 drange
= last_ptr_list((struct ptr_list
*)rl
);
/*
 * rl_to_sval() - if @rl collapses to a single value, store it in
 * *sval; the comparison of min and max decides.  The int return is
 * presumably a success flag -- TODO confirm against callers.
 * NOTE(review): fragment with missing lines; min/max extraction and
 * the return statements are not visible.
 */
282 int rl_to_sval(struct range_list
*rl
, sval_t
*sval
)
291 if (sval_cmp(min
, max
) != 0)
297 struct symbol
*rl_type(struct range_list
*rl
)
301 return rl_min(rl
).type
;
/*
 * alloc_range_helper_sval() - allocate a data_range from either the
 * permanent pool (perm != 0) or the normal per-pass pool.
 * NOTE(review): fragment with missing lines; the perm branch, the
 * min/max assignments and the return are not visible.
 */
304 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
306 struct data_range
*ret
;
309 ret
= __alloc_perm_data_range(0);
311 ret
= __alloc_data_range(0);
317 struct data_range
*alloc_range(sval_t min
, sval_t max
)
319 return alloc_range_helper_sval(min
, max
, 0);
322 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
324 return alloc_range_helper_sval(min
, max
, 1);
/*
 * alloc_rl() - build a one-entry range list for [min, max].  An
 * inverted pair (min > max) degrades to the whole range of min's
 * type.
 * NOTE(review): fragment with missing lines; the final return is not
 * visible.
 */
327 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
329 struct range_list
*rl
= NULL
;
331 if (sval_cmp(min
, max
) > 0)
332 return alloc_whole_rl(min
.type
);
334 add_range(&rl
, min
, max
);
/*
 * alloc_whole_rl() - range list spanning the entire @type, from
 * sval_type_min() to sval_type_max().
 * NOTE(review): fragment with a missing line after the guard -- the
 * fallback assignment for a NULL/odd type is not visible; presumably
 * it substitutes a default type.  TODO confirm upstream.
 */
338 struct range_list
*alloc_whole_rl(struct symbol
*type
)
340 if (!type
|| type_positive_bits(type
) < 0)
343 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
/*
 * add_range() - insert [min, max] into a sorted range list, merging
 * with any ranges it touches or overlaps.  The list stays sorted and
 * non-overlapping; adjacent ranges (max + 1 == next min) are joined.
 * NOTE(review): fragment with many missing lines (early returns,
 * continue statements, braces); the visible cases sketch the overlap
 * handling but are not the complete algorithm.
 */
346 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
348 struct data_range
*tmp
= NULL
;
349 struct data_range
*new = NULL
;
353 * FIXME: This has a problem merging a range_list like: min-0,3-max
354 * with a range like 1-2. You end up with min-2,3-max instead of
357 FOR_EACH_PTR(*list
, tmp
) {
359 /* Sometimes we overlap with more than one range
360 so we have to delete or modify the next range. */
361 if (max
.value
+ 1 == tmp
->min
.value
) {
362 /* join 2 ranges here */
364 DELETE_CURRENT_PTR(tmp
);
368 /* Doesn't overlap with the next one. */
369 if (sval_cmp(max
, tmp
->min
) < 0)
371 /* Partially overlaps with the next one. */
372 if (sval_cmp(max
, tmp
->max
) < 0) {
373 tmp
->min
.value
= max
.value
+ 1;
376 /* Completely overlaps with the next one. */
377 if (sval_cmp(max
, tmp
->max
) >= 0) {
378 DELETE_CURRENT_PTR(tmp
);
379 /* there could be more ranges to delete */
/* adjacency check guards against max.value + 1 overflow via
 * sval_is_max() */
383 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
384 /* join 2 ranges into a big range */
385 new = alloc_range(min
, tmp
->max
);
386 REPLACE_CURRENT_PTR(tmp
, new);
389 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
390 new = alloc_range(min
, max
);
391 INSERT_CURRENT(new, tmp
);
394 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
395 if (sval_cmp(max
, tmp
->max
) < 0)
399 new = alloc_range(min
, max
);
400 REPLACE_CURRENT_PTR(tmp
, new);
405 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
407 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
409 new = alloc_range(min
, max
);
410 REPLACE_CURRENT_PTR(tmp
, new);
/* adjacency below: min.value - 1 guarded by sval_is_min() */
414 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
415 /* join 2 ranges into a big range */
416 new = alloc_range(tmp
->min
, max
);
417 REPLACE_CURRENT_PTR(tmp
, new);
421 /* the new range is entirely above the existing ranges */
422 } END_FOR_EACH_PTR(tmp
);
425 new = alloc_range(min
, max
);
426 add_ptr_list(list
, new);
/*
 * clone_rl() - shallow copy: a new list sharing the same data_range
 * entries (the entries themselves are not duplicated; contrast with
 * clone_rl_permanent()).
 * NOTE(review): fragment with a missing line; the return of ret is
 * not visible.
 */
429 struct range_list
*clone_rl(struct range_list
*list
)
431 struct data_range
*tmp
;
432 struct range_list
*ret
= NULL
;
434 FOR_EACH_PTR(list
, tmp
) {
435 add_ptr_list(&ret
, tmp
);
436 } END_FOR_EACH_PTR(tmp
);
/*
 * clone_rl_permanent() - deep copy into the permanent pool: each
 * data_range is re-allocated via alloc_range_perm() so the clone
 * survives allocator resets.
 * NOTE(review): fragment with a missing line; the return of ret is
 * not visible.
 */
440 struct range_list
*clone_rl_permanent(struct range_list
*list
)
442 struct data_range
*tmp
;
443 struct data_range
*new;
444 struct range_list
*ret
= NULL
;
446 FOR_EACH_PTR(list
, tmp
) {
447 new = alloc_range_perm(tmp
->min
, tmp
->max
);
448 add_ptr_list(&ret
, new);
449 } END_FOR_EACH_PTR(tmp
);
/*
 * rl_union() - merge two range lists by re-adding every range from
 * both through add_range(), which sorts and coalesces.
 * NOTE(review): fragment with a missing line; the return of ret is
 * not visible.
 */
453 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
455 struct data_range
*tmp
;
456 struct range_list
*ret
= NULL
;
458 FOR_EACH_PTR(one
, tmp
) {
459 add_range(&ret
, tmp
->min
, tmp
->max
);
460 } END_FOR_EACH_PTR(tmp
);
461 FOR_EACH_PTR(two
, tmp
) {
462 add_range(&ret
, tmp
->min
, tmp
->max
);
463 } END_FOR_EACH_PTR(tmp
);
/*
 * remove_range() - build a new list equal to @list with [min, max]
 * cut out.  Ranges wholly below or above the cut are kept as-is;
 * wholly covered ranges are dropped; partially covered ranges are
 * trimmed; a range that straddles the cut is split in two.
 * NOTE(review): fragment with missing lines; the +1/-1 adjustments
 * on the trimmed endpoints, continue statements and the return of
 * ret are not visible -- the visible add_range(&ret, max, tmp->max)
 * calls presumably use an adjusted bound.  TODO confirm upstream.
 */
467 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
469 struct data_range
*tmp
;
470 struct range_list
*ret
= NULL
;
472 FOR_EACH_PTR(list
, tmp
) {
473 if (sval_cmp(tmp
->max
, min
) < 0) {
474 add_range(&ret
, tmp
->min
, tmp
->max
);
477 if (sval_cmp(tmp
->min
, max
) > 0) {
478 add_range(&ret
, tmp
->min
, tmp
->max
);
481 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
483 if (sval_cmp(tmp
->min
, min
) >= 0) {
485 add_range(&ret
, max
, tmp
->max
);
486 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
488 add_range(&ret
, tmp
->min
, min
);
492 add_range(&ret
, tmp
->min
, min
);
493 add_range(&ret
, max
, tmp
->max
);
495 } END_FOR_EACH_PTR(tmp
);
/*
 * ranges_equiv() - two data_ranges are equivalent when both min and
 * max compare equal.
 * NOTE(review): fragment with missing lines; the NULL checks and the
 * return statements are not visible.
 */
499 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
505 if (sval_cmp(one
->min
, two
->min
) != 0)
507 if (sval_cmp(one
->max
, two
->max
) != 0)
/*
 * rl_equiv() - walk both lists in lockstep with the PREPARE/NEXT/
 * FINISH ptr-list iterators and compare entry by entry via
 * ranges_equiv().  Both iterators exhausting together means equal.
 * NOTE(review): fragment with missing lines; the loop construct,
 * the result variable and the returns are not visible.
 */
512 int rl_equiv(struct range_list
*one
, struct range_list
*two
)
514 struct data_range
*one_range
;
515 struct data_range
*two_range
;
520 PREPARE_PTR_LIST(one
, one_range
);
521 PREPARE_PTR_LIST(two
, two_range
);
523 if (!one_range
&& !two_range
)
525 if (!ranges_equiv(one_range
, two_range
))
527 NEXT_PTR_LIST(one_range
);
528 NEXT_PTR_LIST(two_range
);
530 FINISH_PTR_LIST(two_range
);
531 FINISH_PTR_LIST(one_range
);
/*
 * true_comparison_range() - can "left <op> right" possibly be true
 * for some pair of values drawn from the two ranges?  Each case
 * checks whether the ranges overlap in the direction the operator
 * requires; unknown operators are reported via sm_msg().
 * NOTE(review): fragment with missing lines; several case labels
 * (e.g. '<', '>', SPECIAL_EQUAL) and all return statements are not
 * visible -- visible cases presumably fall through from their signed
 * counterparts.  TODO confirm upstream.
 */
536 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
538 switch (comparison
) {
540 case SPECIAL_UNSIGNED_LT
:
541 if (sval_cmp(left
->min
, right
->max
) < 0)
544 case SPECIAL_UNSIGNED_LTE
:
546 if (sval_cmp(left
->min
, right
->max
) <= 0)
/* equality is possible when the ranges overlap at all */
550 if (sval_cmp(left
->max
, right
->min
) < 0)
552 if (sval_cmp(left
->min
, right
->max
) > 0)
555 case SPECIAL_UNSIGNED_GTE
:
557 if (sval_cmp(left
->max
, right
->min
) >= 0)
561 case SPECIAL_UNSIGNED_GT
:
562 if (sval_cmp(left
->max
, right
->min
) > 0)
565 case SPECIAL_NOTEQUAL
:
/* != can be true unless both sides are the same single value */
566 if (sval_cmp(left
->min
, left
->max
) != 0)
568 if (sval_cmp(right
->min
, right
->max
) != 0)
570 if (sval_cmp(left
->min
, right
->min
) != 0)
574 sm_msg("unhandled comparison %d\n", comparison
);
/*
 * true_comparison_range_LR() - like true_comparison_range() but with
 * a @left flag selecting which operand is the variable side; the two
 * visible returns swap the argument order.
 * NOTE(review): fragment with missing lines; the if/else on @left is
 * not visible.
 */
580 int true_comparison_range_LR(int comparison
, struct data_range
*var
, struct data_range
*val
, int left
)
583 return true_comparison_range(var
, comparison
, val
);
585 return true_comparison_range(val
, comparison
, var
);
/*
 * false_comparison_range_sval() - can "left <op> right" possibly be
 * false for some pair of values drawn from the two ranges?  Dual of
 * true_comparison_range(); unknown operators are reported via
 * sm_msg().
 * NOTE(review): fragment with missing lines; several case labels and
 * all return statements are not visible.
 */
588 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
590 switch (comparison
) {
592 case SPECIAL_UNSIGNED_LT
:
593 if (sval_cmp(left
->max
, right
->min
) >= 0)
596 case SPECIAL_UNSIGNED_LTE
:
598 if (sval_cmp(left
->max
, right
->min
) > 0)
/* == can be false unless both sides are the same single value */
602 if (sval_cmp(left
->min
, left
->max
) != 0)
604 if (sval_cmp(right
->min
, right
->max
) != 0)
606 if (sval_cmp(left
->min
, right
->min
) != 0)
609 case SPECIAL_UNSIGNED_GTE
:
611 if (sval_cmp(left
->min
, right
->max
) < 0)
615 case SPECIAL_UNSIGNED_GT
:
616 if (sval_cmp(left
->min
, right
->max
) <= 0)
619 case SPECIAL_NOTEQUAL
:
/* != can be false only when the ranges overlap somewhere */
620 if (sval_cmp(left
->max
, right
->min
) < 0)
622 if (sval_cmp(left
->min
, right
->max
) > 0)
626 sm_msg("unhandled comparison %d\n", comparison
);
/*
 * false_comparison_range_LR() - @left flag selects operand order,
 * mirroring true_comparison_range_LR().
 * NOTE(review): fragment with missing lines; the if/else on @left is
 * not visible.
 */
632 int false_comparison_range_LR(int comparison
, struct data_range
*var
, struct data_range
*val
, int left
)
635 return false_comparison_range_sval(var
, comparison
, val
);
637 return false_comparison_range_sval(val
, comparison
, var
);
/*
 * possibly_true() - can the comparison between two expressions be
 * true?  Gets each side's implied range list, then tests every pair
 * of ranges with true_comparison_range().
 * NOTE(review): fragment with missing lines; the returns (including
 * the conservative result when a range cannot be implied) are not
 * visible.
 */
640 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
642 struct range_list
*rl_left
, *rl_right
;
643 struct data_range
*tmp_left
, *tmp_right
;
645 if (!get_implied_rl(left
, &rl_left
))
647 if (!get_implied_rl(right
, &rl_right
))
650 FOR_EACH_PTR(rl_left
, tmp_left
) {
651 FOR_EACH_PTR(rl_right
, tmp_right
) {
652 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
654 } END_FOR_EACH_PTR(tmp_right
);
655 } END_FOR_EACH_PTR(tmp_left
);
/*
 * possibly_false() - can the comparison between two expressions be
 * false?  Dual of possibly_true(), using
 * false_comparison_range_sval() per range pair.
 * NOTE(review): fragment with missing lines; the returns are not
 * visible.
 */
659 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
661 struct range_list
*rl_left
, *rl_right
;
662 struct data_range
*tmp_left
, *tmp_right
;
664 if (!get_implied_rl(left
, &rl_left
))
666 if (!get_implied_rl(right
, &rl_right
))
669 FOR_EACH_PTR(rl_left
, tmp_left
) {
670 FOR_EACH_PTR(rl_right
, tmp_right
) {
671 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
673 } END_FOR_EACH_PTR(tmp_right
);
674 } END_FOR_EACH_PTR(tmp_left
);
/*
 * possibly_true_rl() - same pairwise test as possibly_true() but
 * starting from range lists instead of expressions; NULL lists are
 * handled up front.
 * NOTE(review): fragment with missing lines; the returns are not
 * visible.
 */
678 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
680 struct data_range
*left_tmp
, *right_tmp
;
682 if (!left_ranges
|| !right_ranges
)
685 FOR_EACH_PTR(left_ranges
, left_tmp
) {
686 FOR_EACH_PTR(right_ranges
, right_tmp
) {
687 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
689 } END_FOR_EACH_PTR(right_tmp
);
690 } END_FOR_EACH_PTR(left_tmp
);
/*
 * possibly_false_rl() - dual of possibly_true_rl(), using
 * false_comparison_range_sval() per range pair.
 * NOTE(review): fragment with missing lines; the returns are not
 * visible.
 */
694 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
696 struct data_range
*left_tmp
, *right_tmp
;
698 if (!left_ranges
|| !right_ranges
)
701 FOR_EACH_PTR(left_ranges
, left_tmp
) {
702 FOR_EACH_PTR(right_ranges
, right_tmp
) {
703 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
705 } END_FOR_EACH_PTR(right_tmp
);
706 } END_FOR_EACH_PTR(left_tmp
);
710 /* FIXME: the _rl here stands for right left so really it should be _lr */
/*
 * possibly_true_rl_LR() - @left flag selects operand order for
 * possibly_true_rl().
 * NOTE(review): fragment with missing lines; the if/else on @left is
 * not visible.
 */
711 int possibly_true_rl_LR(int comparison
, struct range_list
*a
, struct range_list
*b
, int left
)
714 return possibly_true_rl(a
, comparison
, b
);
716 return possibly_true_rl(b
, comparison
, a
);
/*
 * possibly_false_rl_LR() - @left flag selects operand order for
 * possibly_false_rl().
 * NOTE(review): fragment with missing lines; the if/else on @left is
 * not visible.
 */
719 int possibly_false_rl_LR(int comparison
, struct range_list
*a
, struct range_list
*b
, int left
)
722 return possibly_false_rl(a
, comparison
, b
);
724 return possibly_false_rl(b
, comparison
, a
);
/*
 * tack_on() - append a data_range to a range list as-is, with no
 * sorting or merging (contrast with add_range()).
 */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/*
 * push_rl() - push a range list onto a range-list stack (the stack is
 * itself a ptr_list; the top is the last element).
 */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/*
 * pop_rl() - remove and return the top of the range-list stack: grab
 * the last ptr_list element, then delete it.
 * NOTE(review): fragment with a missing line; the return of rl is
 * not visible.
 */
737 struct range_list
*pop_rl(struct range_list_stack
**rl_stack
)
739 struct range_list
*rl
;
741 rl
= last_ptr_list((struct ptr_list
*)*rl_stack
);
742 delete_ptr_list_last((struct ptr_list
**)rl_stack
);
/*
 * top_rl() - peek at the top of the range-list stack without popping.
 * NOTE(review): fragment with a missing line; the return of rl is
 * not visible.
 */
746 struct range_list
*top_rl(struct range_list_stack
*rl_stack
)
748 struct range_list
*rl
;
750 rl
= last_ptr_list((struct ptr_list
*)rl_stack
);
754 void filter_top_rl(struct range_list_stack
**rl_stack
, sval_t sval
)
756 struct range_list
*rl
;
758 rl
= pop_rl(rl_stack
);
759 rl
= remove_range(rl
, sval
, sval
);
760 push_rl(rl_stack
, rl
);
/*
 * sval_too_big() - does @sval's unsigned value exceed what @type can
 * hold?  64-bit types are handled first (the shift below would be UB
 * for type_bits == 64).
 * NOTE(review): fragment with missing lines; the return statements
 * are not visible.
 */
763 static int sval_too_big(struct symbol
*type
, sval_t sval
)
765 if (type_bits(type
) == 64)
767 if (sval
.uvalue
> ((1ULL << type_bits(type
)) - 1))
/*
 * add_range_t() - add [min, max] to *rl after casting the endpoints
 * to @type, handling the cases where the cast wraps: values too wide
 * for the type degrade to the whole type range; negative-to-unsigned
 * and high-positive-to-negative casts may split the range in two.
 * NOTE(review): fragment with missing lines; several returns/braces
 * between the visible cases are not visible.
 */
772 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
774 /* If we're just adding a number, cast it and add it */
775 if (sval_cmp(min
, max
) == 0) {
776 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
780 /* If the range is within the type range then add it */
781 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
782 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
787 * If the range we are adding has more bits than the range type then
788 * add the whole range type. Eg:
789 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
790 * This isn't totally the right thing to do. We could be more granular.
792 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
793 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
797 /* Cast negative values to high positive values */
798 if (sval_is_negative(min
) && type_unsigned(type
)) {
799 if (sval_is_positive(max
)) {
800 if (sval_too_high(type
, max
)) {
801 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
/* range straddles zero: split into [0, max] plus the wrapped
 * high part [cast(min), type-max] added below */
804 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
805 max
= sval_type_max(type
);
807 max
= sval_cast(type
, max
);
809 min
= sval_cast(type
, min
);
810 add_range(rl
, min
, max
);
813 /* Cast high positive numbers to negative */
814 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
815 if (!sval_is_negative(sval_cast(type
, min
))) {
/* split: keep [cast(min), type-max], wrap the rest below */
816 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
817 min
= sval_type_min(type
);
819 min
= sval_cast(type
, min
);
821 max
= sval_cast(type
, max
);
822 add_range(rl
, min
, max
);
/*
 * rl_truncate_cast() - cast @rl to a narrower @type by masking each
 * endpoint to the type's bit width; if masking makes min > max the
 * endpoints are re-cast via sval_cast().  A same-type cast is a
 * no-op.
 * NOTE(review): fragment with missing lines; the min/max locals,
 * the else branch and the return of ret are not visible.
 */
828 struct range_list
*rl_truncate_cast(struct symbol
*type
, struct range_list
*rl
)
830 struct data_range
*tmp
;
831 struct range_list
*ret
= NULL
;
837 if (!type
|| type
== rl_type(rl
))
840 FOR_EACH_PTR(rl
, tmp
) {
843 if (type_bits(type
) < type_bits(rl_type(rl
))) {
/* keep only the low type_bits(type) bits of each endpoint */
844 min
.uvalue
= tmp
->min
.uvalue
& ((1ULL << type_bits(type
)) - 1);
845 max
.uvalue
= tmp
->max
.uvalue
& ((1ULL << type_bits(type
)) - 1);
847 if (sval_cmp(min
, max
) > 0) {
848 min
= sval_cast(type
, min
);
849 max
= sval_cast(type
, max
);
851 add_range_t(type
, &ret
, min
, max
);
852 } END_FOR_EACH_PTR(tmp
);
/*
 * rl_is_sane() - sanity-check a range list: every endpoint fits the
 * list's type and min <= max within each range.
 * NOTE(review): fragment with missing lines; the type local and the
 * return statements are not visible.
 */
857 static int rl_is_sane(struct range_list
*rl
)
859 struct data_range
*tmp
;
863 FOR_EACH_PTR(rl
, tmp
) {
864 if (!sval_fits(type
, tmp
->min
))
866 if (!sval_fits(type
, tmp
->max
))
868 if (sval_cmp(tmp
->min
, tmp
->max
) > 0)
870 } END_FOR_EACH_PTR(tmp
);
/*
 * rl_type_consistent() - true when every endpoint in @rl carries the
 * same type.
 * NOTE(review): fragment with missing lines; the type local and the
 * return statements are not visible.
 */
875 static int rl_type_consistent(struct range_list
*rl
)
877 struct data_range
*tmp
;
881 FOR_EACH_PTR(rl
, tmp
) {
882 if (type
!= tmp
->min
.type
|| type
!= tmp
->max
.type
)
884 } END_FOR_EACH_PTR(tmp
);
/*
 * cast_rl() - cast a whole range list to @type via add_range_t() per
 * range.  Insane input degrades to the whole type range; a same-type,
 * type-consistent list is returned unchanged.
 * NOTE(review): fragment with missing lines; the NULL/sanity guards
 * and returns between the visible statements are not visible.
 */
888 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
890 struct data_range
*tmp
;
891 struct range_list
*ret
= NULL
;
899 return alloc_whole_rl(type
);
900 if (type
== rl_type(rl
) && rl_type_consistent(rl
))
903 FOR_EACH_PTR(rl
, tmp
) {
904 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
905 } END_FOR_EACH_PTR(tmp
);
/* fallback when the per-range casts produced nothing usable --
 * NOTE(review): the guarding condition is missing from this
 * extraction */
908 return alloc_whole_rl(type
);
/*
 * rl_invert() - complement of a range list over its type's domain:
 * walk the (sorted) ranges emitting the gaps between them, tracking
 * gap_min from type-min upward and finishing with the tail gap up to
 * the type's max.
 * NOTE(review): fragment with missing lines; the NULL guard and the
 * return of ret are not visible.
 */
913 struct range_list
*rl_invert(struct range_list
*orig
)
915 struct range_list
*ret
= NULL
;
916 struct data_range
*tmp
;
917 sval_t gap_min
, abs_max
, sval
;
922 gap_min
= sval_type_min(rl_min(orig
).type
);
923 abs_max
= sval_type_max(rl_max(orig
).type
);
925 FOR_EACH_PTR(orig
, tmp
) {
/* emit the gap below this range: [gap_min, tmp->min - 1] */
926 if (sval_cmp(tmp
->min
, gap_min
) > 0) {
927 sval
= sval_type_val(tmp
->min
.type
, tmp
->min
.value
- 1);
928 add_range(&ret
, gap_min
, sval
);
/* next gap starts just past this range; the abs_max check below
 * avoids computing max + 1 past the top of the type */
930 gap_min
= sval_type_val(tmp
->max
.type
, tmp
->max
.value
+ 1);
931 if (sval_cmp(tmp
->max
, abs_max
) == 0)
933 } END_FOR_EACH_PTR(tmp
);
935 if (sval_cmp(gap_min
, abs_max
) < 0)
936 add_range(&ret
, gap_min
, abs_max
);
/*
 * rl_filter() - subtract @filter from @rl by removing each of
 * filter's ranges in turn.
 * NOTE(review): fragment with a missing line; the return of rl is
 * not visible.
 */
941 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
943 struct data_range
*tmp
;
945 FOR_EACH_PTR(filter
, tmp
) {
946 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
947 } END_FOR_EACH_PTR(tmp
);
/*
 * rl_intersection() - one ∩ two computed as one minus the complement
 * of two (filter @one by rl_invert(two)).
 * NOTE(review): fragment with missing lines; a guard for a NULL @two
 * presumably precedes the visible statements -- rl_invert of an empty
 * list would otherwise behave differently.  TODO confirm upstream.
 */
952 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
956 two
= rl_invert(two
);
957 return rl_filter(one
, two
);
/*
 * free_rl() - release the ptr_list cells backing a range list and
 * NULL the caller's pointer (via __free_ptr_list's out-parameter).
 */
void free_rl(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}
965 static void free_single_dinfo(struct data_info
*dinfo
)
967 free_rl(&dinfo
->value_ranges
);
/*
 * free_dinfos() - walk one allocation blob and free every data_info
 * stored in it, stepping by sizeof(struct data_info).
 * NOTE(review): fragment with a missing line; the "offset += size"
 * step inside the loop is not visible here -- as shown the loop would
 * not terminate, so assume the increment exists upstream.
 */
970 static void free_dinfos(struct allocation_blob
*blob
)
972 unsigned int size
= sizeof(struct data_info
);
973 unsigned int offset
= 0;
975 while (offset
< blob
->offset
) {
976 free_single_dinfo((struct data_info
*)(blob
->data
+ offset
));
/*
 * free_data_info_allocs() - reset the data_info allocator: zero its
 * bookkeeping, then walk the blob chain freeing each blob (capturing
 * ->next before blob_free), and finally reset the data_range
 * allocator too.
 * NOTE(review): fragment with missing lines; the loop construct, the
 * per-blob free_dinfos() call and the blob = next advance are not
 * visible.
 */
981 void free_data_info_allocs(void)
983 struct allocator_struct
*desc
= &data_info_allocator
;
984 struct allocation_blob
*blob
= desc
->blobs
;
987 desc
->allocations
= 0;
988 desc
->total_bytes
= 0;
989 desc
->useful_bytes
= 0;
990 desc
->freelist
= NULL
;
992 struct allocation_blob
*next
= blob
->next
;
994 blob_free(blob
, desc
->chunking
);
997 clear_data_range_alloc();