/*
 * sparse/smatch_ranges.c
 *
 * Copyright (C) 2009 Dan Carpenter.
 *
 * Licensed under the Open Software License version 1.1
 *
 */

#include "parse.h"
#include "smatch.h"
#include "smatch_extra.h"
#include "smatch_slist.h"

ALLOCATOR(data_info, "smatch extra data");
ALLOCATOR(data_range, "data range");
__DO_ALLOCATOR(struct data_range, sizeof(struct data_range), __alignof__(struct data_range),
			 "permanent ranges", perm_data_range);

char *show_rl(struct range_list *list)
{
	struct data_range *tmp;
	char full[256];
	int i = 0;

	full[0] = '\0';
	full[255] = '\0';
	FOR_EACH_PTR(list, tmp) {
		if (i++)
			strncat(full, ",", 254 - strlen(full));
		if (sval_cmp(tmp->min, tmp->max) == 0) {
			strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
			continue;
		}
		strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
		strncat(full, "-", 254 - strlen(full));
		strncat(full, sval_to_str(tmp->max), 254 - strlen(full));
	} END_FOR_EACH_PTR(tmp);
	return alloc_sname(full);
}

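/*
 * parse_val() reads one value out of a range string: either a literal
 * number (via strtoll()) or one of the symbolic limits handled below
 * ("min", "max", "s16max", "u32max", "s64min", ...), converted to
 * 'type'.  *endp is left pointing just past the consumed token.
 */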
static sval_t parse_val(struct symbol *type, char *c, char **endp)
{
	char *start = c;
	sval_t ret;

	if (!strncmp(start, "max", 3)) {
		ret = sval_type_max(type);
		c += 3;
	} else if (!strncmp(start, "u64max", 6)) {
		ret = sval_type_val(type, ULLONG_MAX);
		c += 6;
	} else if (!strncmp(start, "s64max", 6)) {
		ret = sval_type_val(type, LLONG_MAX);
		c += 6;
	} else if (!strncmp(start, "u32max", 6)) {
		ret = sval_type_val(type, UINT_MAX);
		c += 6;
	} else if (!strncmp(start, "s32max", 6)) {
		ret = sval_type_val(type, INT_MAX);
		c += 6;
	} else if (!strncmp(start, "u16max", 6)) {
		ret = sval_type_val(type, USHRT_MAX);
		c += 6;
	} else if (!strncmp(start, "s16max", 6)) {
		ret = sval_type_val(type, SHRT_MAX);
		c += 6;
	} else if (!strncmp(start, "min", 3)) {
		ret = sval_type_min(type);
		c += 3;
	} else if (!strncmp(start, "s64min", 6)) {
		ret = sval_type_val(type, LLONG_MIN);
		c += 6;
	} else if (!strncmp(start, "s32min", 6)) {
		ret = sval_type_val(type, INT_MIN);
		c += 6;
	} else if (!strncmp(start, "s16min", 6)) {
		ret = sval_type_val(type, SHRT_MIN);
		c += 6;
	} else {
		ret = sval_type_val(type, strtoll(start, &c, 10));
	}
	*endp = c;
	return ret;
}

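/*
 * str_to_rl() parses a printed range list (the same format show_rl()
 * produces) back into a range_list: comma separated values or
 * "low-high" pairs, where each value is a number or a symbolic limit.
 * Parentheses around a value are skipped, presumably so that negative
 * numbers don't get confused with the '-' range separator.  A couple
 * of illustrative examples, assuming an int type:
 *     "0-5,7"        ->  {[0-5], [7]}
 *     "s32min-(-1)"  ->  {[INT_MIN, -1]}
 *     "empty"        ->  NULL
 */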
void str_to_rl(struct symbol *type, char *value, struct range_list **rl)
{
	sval_t min, max;
	char *c;

	if (!type)
		type = &llong_ctype;
	*rl = NULL;

	if (value && strcmp(value, "empty") == 0)
		return;

	c = value;
	while (*c) {
		if (*c == '(')
			c++;
		min = parse_val(type, c, &c);
		if (*c == ')')
			c++;
		if (!*c) {
			add_range(rl, min, min);
			break;
		}
		if (*c == ',') {
			add_range(rl, min, min);
			c++;
			continue;
		}
		if (*c != '-') {
			sm_msg("debug XXX: trouble parsing %s ", value);
			break;
		}
		c++;
		if (*c == '(')
			c++;
		max = parse_val(type, c, &c);
		add_range(rl, min, max);
		if (*c == ')')
			c++;
		if (!*c)
			break;
		if (*c != ',') {
			sm_msg("debug YYY: trouble parsing %s %s", value, c);
			break;
		}
		c++;
	}

	*rl = cast_rl(type, *rl);
}

int is_whole_rl(struct range_list *rl)
{
	struct data_range *drange;

	if (ptr_list_empty(rl))
		return 1;
	drange = first_ptr_list((struct ptr_list *)rl);
	if (sval_is_min(drange->min) && sval_is_max(drange->max))
		return 1;
	return 0;
}

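/*
 * rl_min()/rl_max() return the first range's minimum and the last
 * range's maximum (the list is kept sorted by add_range()).  For an
 * empty list they fall back to LLONG_MIN/LLONG_MAX with llong_ctype
 * as the type.
 */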
sval_t rl_min(struct range_list *rl)
{
	struct data_range *drange;
	sval_t ret;

	ret.type = &llong_ctype;
	ret.value = LLONG_MIN;
	if (ptr_list_empty(rl))
		return ret;
	drange = first_ptr_list((struct ptr_list *)rl);
	return drange->min;
}

sval_t rl_max(struct range_list *rl)
{
	struct data_range *drange;
	sval_t ret;

	ret.type = &llong_ctype;
	ret.value = LLONG_MAX;
	if (ptr_list_empty(rl))
		return ret;
	drange = last_ptr_list((struct ptr_list *)rl);
	return drange->max;
}

struct symbol *rl_type(struct range_list *rl)
{
	if (!rl)
		return NULL;
	return rl_min(rl).type;
}

static struct data_range *alloc_range_helper_sval(sval_t min, sval_t max, int perm)
{
	struct data_range *ret;

	if (perm)
		ret = __alloc_perm_data_range(0);
	else
		ret = __alloc_data_range(0);
	ret->min = min;
	ret->max = max;
	return ret;
}

struct data_range *alloc_range(sval_t min, sval_t max)
{
	return alloc_range_helper_sval(min, max, 0);
}

struct data_range *alloc_range_perm(sval_t min, sval_t max)
{
	return alloc_range_helper_sval(min, max, 1);
}

struct range_list *alloc_rl(sval_t min, sval_t max)
{
	struct range_list *rl = NULL;

	if (sval_cmp(min, max) > 0)
		return alloc_whole_rl(min.type);

	add_range(&rl, min, max);
	return rl;
}

struct range_list *alloc_whole_rl(struct symbol *type)
{
	if (!type || type_positive_bits(type) < 0)
		type = &llong_ctype;

	return alloc_rl(sval_type_min(type), sval_type_max(type));
}

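/*
 * add_range() inserts [min, max] into *list, keeping the list sorted
 * and non-overlapping: new ranges are merged with any existing ranges
 * they touch or overlap (modulo the FIXME below).
 */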
void add_range(struct range_list **list, sval_t min, sval_t max)
{
	struct data_range *tmp = NULL;
	struct data_range *new = NULL;
	int check_next = 0;

	/*
	 * FIXME: This has a problem merging a range_list like: min-0,3-max
	 * with a range like 1-2.  You end up with min-2,3-max instead of
	 * just min-max.
	 */
	FOR_EACH_PTR(*list, tmp) {
		if (check_next) {
			/* Sometimes we overlap with more than one range
			   so we have to delete or modify the next range. */
			if (max.value + 1 == tmp->min.value) {
				/* join 2 ranges here */
				new->max = tmp->max;
				DELETE_CURRENT_PTR(tmp);
				return;
			}

			/* Doesn't overlap with the next one. */
			if (sval_cmp(max, tmp->min) < 0)
				return;
			/* Partially overlaps with the next one. */
			if (sval_cmp(max, tmp->max) < 0) {
				tmp->min.value = max.value + 1;
				return;
			}
			/* Completely overlaps with the next one. */
			if (sval_cmp(max, tmp->max) >= 0) {
				DELETE_CURRENT_PTR(tmp);
				/* there could be more ranges to delete */
				continue;
			}
		}
		if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(min, tmp->max);
			REPLACE_CURRENT_PTR(tmp, new);
			return;
		}
		if (sval_cmp(max, tmp->min) < 0) { /* new range entirely below */
			new = alloc_range(min, max);
			INSERT_CURRENT(new, tmp);
			return;
		}
		if (sval_cmp(min, tmp->min) < 0) { /* new range partially below */
			if (sval_cmp(max, tmp->max) < 0)
				max = tmp->max;
			else
				check_next = 1;
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			if (!check_next)
				return;
			continue;
		}
		if (sval_cmp(max, tmp->max) <= 0) /* new range already included */
			return;
		if (sval_cmp(min, tmp->max) <= 0) { /* new range partially above */
			min = tmp->min;
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			check_next = 1;
			continue;
		}
		if (!sval_is_min(min) && min.value - 1 == tmp->max.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(tmp->min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			check_next = 1;
			continue;
		}
		/* the new range is entirely above the existing ranges */
	} END_FOR_EACH_PTR(tmp);
	if (check_next)
		return;
	new = alloc_range(min, max);
	add_ptr_list(list, new);
}

struct range_list *clone_rl(struct range_list *list)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		add_ptr_list(&ret, tmp);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

struct range_list *clone_rl_permanent(struct range_list *list)
{
	struct data_range *tmp;
	struct data_range *new;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		new = alloc_range_perm(tmp->min, tmp->max);
		add_ptr_list(&ret, new);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

struct range_list *rl_union(struct range_list *one, struct range_list *two)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(one, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
	FOR_EACH_PTR(two, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

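/*
 * remove_range() builds and returns a new list equal to 'list' with
 * the span [min, max] cut out; a range that straddles the removed
 * span is split into two.
 */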
struct range_list *remove_range(struct range_list *list, sval_t min, sval_t max)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		if (sval_cmp(tmp->max, min) < 0) {
			add_range(&ret, tmp->min, tmp->max);
			continue;
		}
		if (sval_cmp(tmp->min, max) > 0) {
			add_range(&ret, tmp->min, tmp->max);
			continue;
		}
		if (sval_cmp(tmp->min, min) >= 0 && sval_cmp(tmp->max, max) <= 0)
			continue;
		if (sval_cmp(tmp->min, min) >= 0) {
			max.value++;
			add_range(&ret, max, tmp->max);
		} else if (sval_cmp(tmp->max, max) <= 0) {
			min.value--;
			add_range(&ret, tmp->min, min);
		} else {
			min.value--;
			max.value++;
			add_range(&ret, tmp->min, min);
			add_range(&ret, max, tmp->max);
		}
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

int ranges_equiv(struct data_range *one, struct data_range *two)
{
	if (!one && !two)
		return 1;
	if (!one || !two)
		return 0;
	if (sval_cmp(one->min, two->min) != 0)
		return 0;
	if (sval_cmp(one->max, two->max) != 0)
		return 0;
	return 1;
}

int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		if (!one_range && !two_range)
			return 1;
		if (!ranges_equiv(one_range, two_range))
			return 0;
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);

	return 1;
}

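/*
 * The *_comparison_range() helpers answer "can this comparison be
 * true (or false) for some pair of values drawn from the two ranges?"
 * For example [0-5] == [3-9] is possibly true because the ranges
 * overlap, and possibly false because neither side is a single value.
 */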
int true_comparison_range(struct data_range *left, int comparison, struct data_range *right)
{
	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->min, right->max) < 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_LTE:
	case SPECIAL_LTE:
		if (sval_cmp(left->min, right->max) <= 0)
			return 1;
		return 0;
	case SPECIAL_EQUAL:
		if (sval_cmp(left->max, right->min) < 0)
			return 0;
		if (sval_cmp(left->min, right->max) > 0)
			return 0;
		return 1;
	case SPECIAL_UNSIGNED_GTE:
	case SPECIAL_GTE:
		if (sval_cmp(left->max, right->min) >= 0)
			return 1;
		return 0;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->max, right->min) > 0)
			return 1;
		return 0;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(left->min, left->max) != 0)
			return 1;
		if (sval_cmp(right->min, right->max) != 0)
			return 1;
		if (sval_cmp(left->min, right->min) != 0)
			return 1;
		return 0;
	default:
		sm_msg("unhandled comparison %d\n", comparison);
		return 0;
	}
	return 0;
}

int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}

static int false_comparison_range_sval(struct data_range *left, int comparison, struct data_range *right)
{
	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->max, right->min) >= 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_LTE:
	case SPECIAL_LTE:
		if (sval_cmp(left->max, right->min) > 0)
			return 1;
		return 0;
	case SPECIAL_EQUAL:
		if (sval_cmp(left->min, left->max) != 0)
			return 1;
		if (sval_cmp(right->min, right->max) != 0)
			return 1;
		if (sval_cmp(left->min, right->min) != 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_GTE:
	case SPECIAL_GTE:
		if (sval_cmp(left->min, right->max) < 0)
			return 1;
		return 0;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->min, right->max) <= 0)
			return 1;
		return 0;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(left->max, right->min) < 0)
			return 0;
		if (sval_cmp(left->min, right->max) > 0)
			return 0;
		return 1;
	default:
		sm_msg("unhandled comparison %d\n", comparison);
		return 0;
	}
	return 0;
}

int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}

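/*
 * possibly_true()/possibly_false() test every pair of sub-ranges from
 * the two expressions.  If either expression has no implied range
 * list they conservatively answer 1 ("could be").
 */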
int possibly_true(struct expression *left, int comparison, struct expression *right)
{
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;

	if (!get_implied_rl(left, &rl_left))
		return 1;
	if (!get_implied_rl(right, &rl_right))
		return 1;

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (true_comparison_range(tmp_left, comparison, tmp_right))
				return 1;
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
	return 0;
}

int possibly_false(struct expression *left, int comparison, struct expression *right)
{
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;

	if (!get_implied_rl(left, &rl_left))
		return 1;
	if (!get_implied_rl(right, &rl_right))
		return 1;

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (false_comparison_range_sval(tmp_left, comparison, tmp_right))
				return 1;
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
	return 0;
}

int possibly_true_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
	struct data_range *left_tmp, *right_tmp;

	if (!left_ranges || !right_ranges)
		return 1;

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (true_comparison_range(left_tmp, comparison, right_tmp))
				return 1;
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
	return 0;
}

int possibly_false_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
	struct data_range *left_tmp, *right_tmp;

	if (!left_ranges || !right_ranges)
		return 1;

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (false_comparison_range_sval(left_tmp, comparison, right_tmp))
				return 1;
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
	return 0;
}

/* FIXME: the _rl here stands for right left so really it should be _lr */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}

int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}

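/*
 * tack_on() appends a data_range without the sorting and merging that
 * add_range() does.  The push_rl()/pop_rl()/top_rl() helpers treat a
 * range_list_stack as a simple stack, and filter_top_rl() removes a
 * single value from the list on top of the stack.
 */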
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}

void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}

struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}

struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}

void filter_top_rl(struct range_list_stack **rl_stack, sval_t sval)
{
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = remove_range(rl, sval, sval);
	push_rl(rl_stack, rl);
}

static int sval_too_big(struct symbol *type, sval_t sval)
{
	if (type_bits(type) == 64)
		return 0;
	if (sval.uvalue > ((1ULL << type_bits(type)) - 1))
		return 1;
	return 0;
}

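/*
 * add_range_t() adds [min, max] to the list as type 'type'.  A range
 * that can't be represented in the target type is either widened to
 * the whole type range or, when the cast flips the sign of one end,
 * split into two sub-ranges (see the handling below).
 */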
static void add_range_t(struct symbol *type, struct range_list **rl, sval_t min, sval_t max)
{
	/* If we're just adding a number, cast it and add it */
	if (sval_cmp(min, max) == 0) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));
		return;
	}

	/* If the range is within the type range then add it */
	if (sval_fits(type, min) && sval_fits(type, max)) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));
		return;
	}

	/*
	 * If the range we are adding has more bits than the range type then
	 * add the whole range type.  Eg:
	 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
	 * This isn't totally the right thing to do.  We could be more granular.
	 */
	if (sval_too_big(type, min) || sval_too_big(type, max)) {
		add_range(rl, sval_type_min(type), sval_type_max(type));
		return;
	}

	/* Cast negative values to high positive values */
	if (sval_is_negative(min) && type_unsigned(type)) {
		if (sval_is_positive(max)) {
			if (sval_too_high(type, max)) {
				add_range(rl, sval_type_min(type), sval_type_max(type));
				return;
			}
			add_range(rl, sval_type_val(type, 0), sval_cast(type, max));
			max = sval_type_max(type);
		} else {
			max = sval_cast(type, max);
		}
		min = sval_cast(type, min);
		add_range(rl, min, max);
	}

	/* Cast high positive numbers to negative */
	if (sval_unsigned(max) && sval_is_negative(sval_cast(type, max))) {
		if (!sval_is_negative(sval_cast(type, min))) {
			add_range(rl, sval_cast(type, min), sval_type_max(type));
			min = sval_type_min(type);
		} else {
			min = sval_cast(type, min);
		}
		max = sval_cast(type, max);
		add_range(rl, min, max);
	}

	return;
}

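/*
 * rl_truncate_cast() casts a range list to a narrower type by masking
 * each endpoint down to the target's bit width; if the masked min
 * ends up above the masked max, both endpoints are re-cast to the
 * type instead.
 */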
struct range_list *rl_truncate_cast(struct symbol *type, struct range_list *rl)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;
	sval_t min, max;

	if (!rl)
		return NULL;

	if (!type || type == rl_type(rl))
		return rl;

	FOR_EACH_PTR(rl, tmp) {
		min = tmp->min;
		max = tmp->max;
		if (type_bits(type) < type_bits(rl_type(rl))) {
			min.uvalue = tmp->min.uvalue & ((1ULL << type_bits(type)) - 1);
			max.uvalue = tmp->max.uvalue & ((1ULL << type_bits(type)) - 1);
		}
		if (sval_cmp(min, max) > 0) {
			min = sval_cast(type, min);
			max = sval_cast(type, max);
		}
		add_range_t(type, &ret, min, max);
	} END_FOR_EACH_PTR(tmp);

	return ret;
}

static int rl_is_sane(struct range_list *rl)
{
	struct data_range *tmp;
	struct symbol *type;

	type = rl_type(rl);
	FOR_EACH_PTR(rl, tmp) {
		if (!sval_fits(type, tmp->min))
			return 0;
		if (!sval_fits(type, tmp->max))
			return 0;
		if (sval_cmp(tmp->min, tmp->max) > 0)
			return 0;
	} END_FOR_EACH_PTR(tmp);

	return 1;
}

static int rl_type_consistent(struct range_list *rl)
{
	struct data_range *tmp;
	struct symbol *type;

	type = rl_type(rl);
	FOR_EACH_PTR(rl, tmp) {
		if (type != tmp->min.type || type != tmp->max.type)
			return 0;
	} END_FOR_EACH_PTR(tmp);
	return 1;
}

struct range_list *cast_rl(struct symbol *type, struct range_list *rl)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	if (!rl)
		return NULL;

	if (!type)
		return rl;
	if (!rl_is_sane(rl))
		return alloc_whole_rl(type);
	if (type == rl_type(rl) && rl_type_consistent(rl))
		return rl;

	FOR_EACH_PTR(rl, tmp) {
		add_range_t(type, &ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	if (!ret)
		return alloc_whole_rl(type);

	return ret;
}

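/*
 * rl_invert() returns the complement of 'orig' over the full range of
 * its type.  Illustrative example for an int list: inverting "1-5"
 * gives "s32min-0,6-s32max".
 */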
struct range_list *rl_invert(struct range_list *orig)
{
	struct range_list *ret = NULL;
	struct data_range *tmp;
	sval_t gap_min, abs_max, sval;

	if (!orig)
		return NULL;

	gap_min = sval_type_min(rl_min(orig).type);
	abs_max = sval_type_max(rl_max(orig).type);

	FOR_EACH_PTR(orig, tmp) {
		if (sval_cmp(tmp->min, gap_min) > 0) {
			sval = sval_type_val(tmp->min.type, tmp->min.value - 1);
			add_range(&ret, gap_min, sval);
		}
		gap_min = sval_type_val(tmp->max.type, tmp->max.value + 1);
		if (sval_cmp(tmp->max, abs_max) == 0)
			gap_min = abs_max;
	} END_FOR_EACH_PTR(tmp);

	if (sval_cmp(gap_min, abs_max) < 0)
		add_range(&ret, gap_min, abs_max);

	return ret;
}

struct range_list *rl_filter(struct range_list *rl, struct range_list *filter)
{
	struct data_range *tmp;

	FOR_EACH_PTR(filter, tmp) {
		rl = remove_range(rl, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	return rl;
}

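/*
 * The intersection is computed as a filter: everything in 'one' that
 * is not in the complement of 'two' survives.  A NULL 'two' gives a
 * NULL (empty) result.
 */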
struct range_list *rl_intersection(struct range_list *one, struct range_list *two)
{
	if (!two)
		return NULL;
	two = rl_invert(two);
	return rl_filter(one, two);
}

void free_rl(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}

static void free_single_dinfo(struct data_info *dinfo)
{
	free_rl(&dinfo->value_ranges);
}

static void free_dinfos(struct allocation_blob *blob)
{
	unsigned int size = sizeof(struct data_info);
	unsigned int offset = 0;

	while (offset < blob->offset) {
		free_single_dinfo((struct data_info *)(blob->data + offset));
		offset += size;
	}
}

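/*
 * free_data_info_allocs() releases every data_info allocation: it
 * walks the allocator's blobs, frees each data_info's range list,
 * frees the blobs, resets the allocator bookkeeping and finally calls
 * clear_data_range_alloc() to drop the data_range allocations.
 */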
void free_data_info_allocs(void)
{
	struct allocator_struct *desc = &data_info_allocator;
	struct allocation_blob *blob = desc->blobs;

	desc->blobs = NULL;
	desc->allocations = 0;
	desc->total_bytes = 0;
	desc->useful_bytes = 0;
	desc->freelist = NULL;
	while (blob) {
		struct allocation_blob *next = blob->next;
		free_dinfos(blob);
		blob_free(blob, desc->chunking);
		blob = next;
	}
	clear_data_range_alloc();
}