/*
 * Copyright (C) 2009 Dan Carpenter.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
 */
20 #include "smatch_extra.h"
21 #include "smatch_slist.h"
23 ALLOCATOR(data_info
, "smatch extra data");
24 ALLOCATOR(data_range
, "data range");
25 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
26 "permanent ranges", perm_data_range
);
28 char *show_rl(struct range_list
*list
)
30 struct data_range
*tmp
;
36 FOR_EACH_PTR(list
, tmp
) {
38 strncat(full
, ",", 254 - strlen(full
));
39 if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
40 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
43 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
44 strncat(full
, "-", 254 - strlen(full
));
45 strncat(full
, sval_to_str(tmp
->max
), 254 - strlen(full
));
46 } END_FOR_EACH_PTR(tmp
);
47 return alloc_sname(full
);
50 static int str_to_comparison_arg_helper(const char *str
,
51 struct expression
*call
, int *comparison
,
52 struct expression
**arg
, char **endp
)
55 char *c
= (char *)str
;
64 *comparison
= SPECIAL_LTE
;
69 } else if (*c
== '=') {
72 *comparison
= SPECIAL_EQUAL
;
73 } else if (*c
== '>') {
76 *comparison
= SPECIAL_GTE
;
89 param
= strtoll(c
, &c
, 10);
90 c
++; /* skip the ']' character */
96 *arg
= get_argument_from_call_expr(call
->args
, param
);
102 int str_to_comparison_arg(const char *str
, struct expression
*call
, int *comparison
, struct expression
**arg
)
111 return str_to_comparison_arg_helper(str
, call
, comparison
, arg
, NULL
);
114 static sval_t
get_val_from_key(int use_max
, struct symbol
*type
, char *c
, struct expression
*call
, char **endp
)
116 struct expression
*arg
;
121 ret
= sval_type_max(type
);
123 ret
= sval_type_min(type
);
125 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
))
128 if (use_max
&& get_implied_max(arg
, &tmp
)) {
130 if (comparison
== '<') {
132 ret
= sval_binop(ret
, '-', tmp
);
135 if (!use_max
&& get_implied_min(arg
, &tmp
)) {
137 if (comparison
== '>') {
139 ret
= sval_binop(ret
, '+', tmp
);
146 static sval_t
parse_val(int use_max
, struct expression
*call
, struct symbol
*type
, char *c
, char **endp
)
151 if (!strncmp(start
, "max", 3)) {
152 ret
= sval_type_max(type
);
154 } else if (!strncmp(start
, "u64max", 6)) {
155 ret
= sval_type_val(type
, ULLONG_MAX
);
157 } else if (!strncmp(start
, "s64max", 6)) {
158 ret
= sval_type_val(type
, LLONG_MAX
);
160 } else if (!strncmp(start
, "u32max", 6)) {
161 ret
= sval_type_val(type
, UINT_MAX
);
163 } else if (!strncmp(start
, "s32max", 6)) {
164 ret
= sval_type_val(type
, INT_MAX
);
166 } else if (!strncmp(start
, "u16max", 6)) {
167 ret
= sval_type_val(type
, USHRT_MAX
);
169 } else if (!strncmp(start
, "s16max", 6)) {
170 ret
= sval_type_val(type
, SHRT_MAX
);
172 } else if (!strncmp(start
, "min", 3)) {
173 ret
= sval_type_min(type
);
175 } else if (!strncmp(start
, "s64min", 6)) {
176 ret
= sval_type_val(type
, LLONG_MIN
);
178 } else if (!strncmp(start
, "s32min", 6)) {
179 ret
= sval_type_val(type
, INT_MIN
);
181 } else if (!strncmp(start
, "s16min", 6)) {
182 ret
= sval_type_val(type
, SHRT_MIN
);
184 } else if (start
[0] == '[') {
185 /* this parses [==p0] comparisons */
186 ret
= get_val_from_key(1, type
, start
, call
, &c
);
188 ret
= sval_type_val(type
, strtoll(start
, &c
, 10));
194 static char *jump_to_call_math(char *value
)
198 while (*c
&& *c
!= '[')
204 if (*c
== '<' || *c
== '=' || *c
== '>')
210 static void str_to_rl_helper(struct expression
*call
, struct symbol
*type
, char *value
, struct range_list
**rl
)
212 sval_t min
, max
, arg_max
, val
;
220 if (strcmp(value
, "empty") == 0)
223 if (strncmp(value
, "[==$", 4) == 0) {
224 struct expression
*arg
;
227 if (!str_to_comparison_arg(value
, call
, &comparison
, &arg
))
229 get_implied_rl(arg
, rl
);
233 call_math
= jump_to_call_math(value
);
234 if (call_math
&& parse_call_math(call
, call_math
, &val
)) {
235 *rl
= alloc_rl(val
, val
);
243 min
= parse_val(0, call
, type
, c
, &c
);
246 if (*c
== '\0' || *c
== '[') {
247 add_range(rl
, min
, min
);
251 add_range(rl
, min
, min
);
256 sm_msg("debug XXX: trouble parsing %s ", value
);
262 max
= parse_val(1, call
, type
, c
, &c
);
266 if (jump_to_call_math(c
) == c
+ 1) {
267 add_range(rl
, min
, max
);
270 arg_max
= get_val_from_key(1, type
, c
, call
, &c
);
271 if (sval_cmp(arg_max
, max
) < 0)
274 add_range(rl
, min
, max
);
278 sm_msg("debug YYY: trouble parsing call = '%s' value = '%s' remaining = '%s'",
279 expr_to_str(call
), value
, c
);
286 *rl
= cast_rl(type
, *rl
);
289 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
291 return str_to_rl_helper(NULL
, type
, value
, rl
);
/* Parse a returned-range string in the context of the given call expression. */
void call_results_to_rl(struct expression *expr, struct symbol *type, char *value, struct range_list **rl)
{
	return str_to_rl_helper(strip_expr(expr), type, value, rl);
}
299 int is_whole_rl(struct range_list
*rl
)
301 struct data_range
*drange
;
303 if (ptr_list_empty(rl
))
305 drange
= first_ptr_list((struct ptr_list
*)rl
);
306 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
311 sval_t
rl_min(struct range_list
*rl
)
313 struct data_range
*drange
;
316 ret
.type
= &llong_ctype
;
317 ret
.value
= LLONG_MIN
;
318 if (ptr_list_empty(rl
))
320 drange
= first_ptr_list((struct ptr_list
*)rl
);
324 sval_t
rl_max(struct range_list
*rl
)
326 struct data_range
*drange
;
329 ret
.type
= &llong_ctype
;
330 ret
.value
= LLONG_MAX
;
331 if (ptr_list_empty(rl
))
333 drange
= last_ptr_list((struct ptr_list
*)rl
);
337 int rl_to_sval(struct range_list
*rl
, sval_t
*sval
)
346 if (sval_cmp(min
, max
) != 0)
352 struct symbol
*rl_type(struct range_list
*rl
)
356 return rl_min(rl
).type
;
359 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
361 struct data_range
*ret
;
364 ret
= __alloc_perm_data_range(0);
366 ret
= __alloc_data_range(0);
372 struct data_range
*alloc_range(sval_t min
, sval_t max
)
374 return alloc_range_helper_sval(min
, max
, 0);
377 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
379 return alloc_range_helper_sval(min
, max
, 1);
382 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
384 struct range_list
*rl
= NULL
;
386 if (sval_cmp(min
, max
) > 0)
387 return alloc_whole_rl(min
.type
);
389 add_range(&rl
, min
, max
);
393 struct range_list
*alloc_whole_rl(struct symbol
*type
)
395 if (!type
|| type_positive_bits(type
) < 0)
397 if (type
->type
== SYM_ARRAY
)
400 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
403 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
405 struct data_range
*tmp
= NULL
;
406 struct data_range
*new = NULL
;
410 * FIXME: This has a problem merging a range_list like: min-0,3-max
411 * with a range like 1-2. You end up with min-2,3-max instead of
414 FOR_EACH_PTR(*list
, tmp
) {
416 /* Sometimes we overlap with more than one range
417 so we have to delete or modify the next range. */
418 if (max
.value
+ 1 == tmp
->min
.value
) {
419 /* join 2 ranges here */
421 DELETE_CURRENT_PTR(tmp
);
425 /* Doesn't overlap with the next one. */
426 if (sval_cmp(max
, tmp
->min
) < 0)
428 /* Partially overlaps with the next one. */
429 if (sval_cmp(max
, tmp
->max
) < 0) {
430 tmp
->min
.value
= max
.value
+ 1;
433 /* Completely overlaps with the next one. */
434 if (sval_cmp(max
, tmp
->max
) >= 0) {
435 DELETE_CURRENT_PTR(tmp
);
436 /* there could be more ranges to delete */
440 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
441 /* join 2 ranges into a big range */
442 new = alloc_range(min
, tmp
->max
);
443 REPLACE_CURRENT_PTR(tmp
, new);
446 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
447 new = alloc_range(min
, max
);
448 INSERT_CURRENT(new, tmp
);
451 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
452 if (sval_cmp(max
, tmp
->max
) < 0)
456 new = alloc_range(min
, max
);
457 REPLACE_CURRENT_PTR(tmp
, new);
462 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
464 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
466 new = alloc_range(min
, max
);
467 REPLACE_CURRENT_PTR(tmp
, new);
471 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
472 /* join 2 ranges into a big range */
473 new = alloc_range(tmp
->min
, max
);
474 REPLACE_CURRENT_PTR(tmp
, new);
478 /* the new range is entirely above the existing ranges */
479 } END_FOR_EACH_PTR(tmp
);
482 new = alloc_range(min
, max
);
483 add_ptr_list(list
, new);
486 struct range_list
*clone_rl(struct range_list
*list
)
488 struct data_range
*tmp
;
489 struct range_list
*ret
= NULL
;
491 FOR_EACH_PTR(list
, tmp
) {
492 add_ptr_list(&ret
, tmp
);
493 } END_FOR_EACH_PTR(tmp
);
497 struct range_list
*clone_rl_permanent(struct range_list
*list
)
499 struct data_range
*tmp
;
500 struct data_range
*new;
501 struct range_list
*ret
= NULL
;
503 FOR_EACH_PTR(list
, tmp
) {
504 new = alloc_range_perm(tmp
->min
, tmp
->max
);
505 add_ptr_list(&ret
, new);
506 } END_FOR_EACH_PTR(tmp
);
510 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
512 struct data_range
*tmp
;
513 struct range_list
*ret
= NULL
;
515 FOR_EACH_PTR(one
, tmp
) {
516 add_range(&ret
, tmp
->min
, tmp
->max
);
517 } END_FOR_EACH_PTR(tmp
);
518 FOR_EACH_PTR(two
, tmp
) {
519 add_range(&ret
, tmp
->min
, tmp
->max
);
520 } END_FOR_EACH_PTR(tmp
);
524 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
526 struct data_range
*tmp
;
527 struct range_list
*ret
= NULL
;
529 FOR_EACH_PTR(list
, tmp
) {
530 if (sval_cmp(tmp
->max
, min
) < 0) {
531 add_range(&ret
, tmp
->min
, tmp
->max
);
534 if (sval_cmp(tmp
->min
, max
) > 0) {
535 add_range(&ret
, tmp
->min
, tmp
->max
);
538 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
540 if (sval_cmp(tmp
->min
, min
) >= 0) {
542 add_range(&ret
, max
, tmp
->max
);
543 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
545 add_range(&ret
, tmp
->min
, min
);
549 add_range(&ret
, tmp
->min
, min
);
550 add_range(&ret
, max
, tmp
->max
);
552 } END_FOR_EACH_PTR(tmp
);
556 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
562 if (sval_cmp(one
->min
, two
->min
) != 0)
564 if (sval_cmp(one
->max
, two
->max
) != 0)
/* Walk both lists in lockstep; equivalent iff every range pair matches. */
int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		if (!one_range && !two_range)
			return 1;
		if (!ranges_equiv(one_range, two_range))
			return 0;
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);

	return 1;
}
593 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
595 switch (comparison
) {
597 case SPECIAL_UNSIGNED_LT
:
598 if (sval_cmp(left
->min
, right
->max
) < 0)
601 case SPECIAL_UNSIGNED_LTE
:
603 if (sval_cmp(left
->min
, right
->max
) <= 0)
607 if (sval_cmp(left
->max
, right
->min
) < 0)
609 if (sval_cmp(left
->min
, right
->max
) > 0)
612 case SPECIAL_UNSIGNED_GTE
:
614 if (sval_cmp(left
->max
, right
->min
) >= 0)
618 case SPECIAL_UNSIGNED_GT
:
619 if (sval_cmp(left
->max
, right
->min
) > 0)
622 case SPECIAL_NOTEQUAL
:
623 if (sval_cmp(left
->min
, left
->max
) != 0)
625 if (sval_cmp(right
->min
, right
->max
) != 0)
627 if (sval_cmp(left
->min
, right
->min
) != 0)
631 sm_msg("unhandled comparison %d\n", comparison
);
/* Same as true_comparison_range(), with operand order chosen by 'left'. */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}
645 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
647 switch (comparison
) {
649 case SPECIAL_UNSIGNED_LT
:
650 if (sval_cmp(left
->max
, right
->min
) >= 0)
653 case SPECIAL_UNSIGNED_LTE
:
655 if (sval_cmp(left
->max
, right
->min
) > 0)
659 if (sval_cmp(left
->min
, left
->max
) != 0)
661 if (sval_cmp(right
->min
, right
->max
) != 0)
663 if (sval_cmp(left
->min
, right
->min
) != 0)
666 case SPECIAL_UNSIGNED_GTE
:
668 if (sval_cmp(left
->min
, right
->max
) < 0)
672 case SPECIAL_UNSIGNED_GT
:
673 if (sval_cmp(left
->min
, right
->max
) <= 0)
676 case SPECIAL_NOTEQUAL
:
677 if (sval_cmp(left
->max
, right
->min
) < 0)
679 if (sval_cmp(left
->min
, right
->max
) > 0)
683 sm_msg("unhandled comparison %d\n", comparison
);
/* Same as false_comparison_range_sval(), with operand order chosen by 'left'. */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}
697 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
699 struct range_list
*rl_left
, *rl_right
;
700 struct data_range
*tmp_left
, *tmp_right
;
702 if (!get_implied_rl(left
, &rl_left
))
704 if (!get_implied_rl(right
, &rl_right
))
707 FOR_EACH_PTR(rl_left
, tmp_left
) {
708 FOR_EACH_PTR(rl_right
, tmp_right
) {
709 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
711 } END_FOR_EACH_PTR(tmp_right
);
712 } END_FOR_EACH_PTR(tmp_left
);
716 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
718 struct range_list
*rl_left
, *rl_right
;
719 struct data_range
*tmp_left
, *tmp_right
;
721 if (!get_implied_rl(left
, &rl_left
))
723 if (!get_implied_rl(right
, &rl_right
))
726 FOR_EACH_PTR(rl_left
, tmp_left
) {
727 FOR_EACH_PTR(rl_right
, tmp_right
) {
728 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
730 } END_FOR_EACH_PTR(tmp_right
);
731 } END_FOR_EACH_PTR(tmp_left
);
735 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
737 struct data_range
*left_tmp
, *right_tmp
;
739 if (!left_ranges
|| !right_ranges
)
742 FOR_EACH_PTR(left_ranges
, left_tmp
) {
743 FOR_EACH_PTR(right_ranges
, right_tmp
) {
744 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
746 } END_FOR_EACH_PTR(right_tmp
);
747 } END_FOR_EACH_PTR(left_tmp
);
751 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
753 struct data_range
*left_tmp
, *right_tmp
;
755 if (!left_ranges
|| !right_ranges
)
758 FOR_EACH_PTR(left_ranges
, left_tmp
) {
759 FOR_EACH_PTR(right_ranges
, right_tmp
) {
760 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
762 } END_FOR_EACH_PTR(right_tmp
);
763 } END_FOR_EACH_PTR(left_tmp
);
/* FIXME: the _rl here stands for right left so really it should be _lr */
/* Operand-order-aware wrapper around possibly_true_rl(). */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}
/* Operand-order-aware wrapper around possibly_false_rl(). */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}
784 int rl_has_sval(struct range_list
*rl
, sval_t sval
)
786 struct data_range
*tmp
;
788 FOR_EACH_PTR(rl
, tmp
) {
789 if (sval_cmp(tmp
->min
, sval
) <= 0 &&
790 sval_cmp(tmp
->max
, sval
) >= 0)
792 } END_FOR_EACH_PTR(tmp
);
/* Append a range node to the list without any merge/sort logic. */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/* Push a range list onto the stack. */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/* Pop and return the topmost range list from the stack. */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}
/* Peek at the topmost range list without popping it. */
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}
823 void filter_top_rl(struct range_list_stack
**rl_stack
, sval_t sval
)
825 struct range_list
*rl
;
827 rl
= pop_rl(rl_stack
);
828 rl
= remove_range(rl
, sval
, sval
);
829 push_rl(rl_stack
, rl
);
832 static int sval_too_big(struct symbol
*type
, sval_t sval
)
834 if (type_bits(type
) == 64)
836 if (sval
.uvalue
> ((1ULL << type_bits(type
)) - 1))
841 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
843 /* If we're just adding a number, cast it and add it */
844 if (sval_cmp(min
, max
) == 0) {
845 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
849 /* If the range is within the type range then add it */
850 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
851 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
856 * If the range we are adding has more bits than the range type then
857 * add the whole range type. Eg:
858 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
859 * This isn't totally the right thing to do. We could be more granular.
861 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
862 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
866 /* Cast negative values to high positive values */
867 if (sval_is_negative(min
) && type_unsigned(type
)) {
868 if (sval_is_positive(max
)) {
869 if (sval_too_high(type
, max
)) {
870 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
873 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
874 max
= sval_type_max(type
);
876 max
= sval_cast(type
, max
);
878 min
= sval_cast(type
, min
);
879 add_range(rl
, min
, max
);
882 /* Cast high positive numbers to negative */
883 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
884 if (!sval_is_negative(sval_cast(type
, min
))) {
885 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
886 min
= sval_type_min(type
);
888 min
= sval_cast(type
, min
);
890 max
= sval_cast(type
, max
);
891 add_range(rl
, min
, max
);
897 struct range_list
*rl_truncate_cast(struct symbol
*type
, struct range_list
*rl
)
899 struct data_range
*tmp
;
900 struct range_list
*ret
= NULL
;
906 if (!type
|| type
== rl_type(rl
))
909 FOR_EACH_PTR(rl
, tmp
) {
912 if (type_bits(type
) < type_bits(rl_type(rl
))) {
913 min
.uvalue
= tmp
->min
.uvalue
& ((1ULL << type_bits(type
)) - 1);
914 max
.uvalue
= tmp
->max
.uvalue
& ((1ULL << type_bits(type
)) - 1);
916 if (sval_cmp(min
, max
) > 0) {
917 min
= sval_cast(type
, min
);
918 max
= sval_cast(type
, max
);
920 add_range_t(type
, &ret
, min
, max
);
921 } END_FOR_EACH_PTR(tmp
);
926 static int rl_is_sane(struct range_list
*rl
)
928 struct data_range
*tmp
;
932 FOR_EACH_PTR(rl
, tmp
) {
933 if (!sval_fits(type
, tmp
->min
))
935 if (!sval_fits(type
, tmp
->max
))
937 if (sval_cmp(tmp
->min
, tmp
->max
) > 0)
939 } END_FOR_EACH_PTR(tmp
);
944 static int rl_type_consistent(struct range_list
*rl
)
946 struct data_range
*tmp
;
950 FOR_EACH_PTR(rl
, tmp
) {
951 if (type
!= tmp
->min
.type
|| type
!= tmp
->max
.type
)
953 } END_FOR_EACH_PTR(tmp
);
957 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
959 struct data_range
*tmp
;
960 struct range_list
*ret
= NULL
;
968 return alloc_whole_rl(type
);
969 if (type
== rl_type(rl
) && rl_type_consistent(rl
))
972 FOR_EACH_PTR(rl
, tmp
) {
973 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
974 } END_FOR_EACH_PTR(tmp
);
977 return alloc_whole_rl(type
);
982 struct range_list
*rl_invert(struct range_list
*orig
)
984 struct range_list
*ret
= NULL
;
985 struct data_range
*tmp
;
986 sval_t gap_min
, abs_max
, sval
;
991 gap_min
= sval_type_min(rl_min(orig
).type
);
992 abs_max
= sval_type_max(rl_max(orig
).type
);
994 FOR_EACH_PTR(orig
, tmp
) {
995 if (sval_cmp(tmp
->min
, gap_min
) > 0) {
996 sval
= sval_type_val(tmp
->min
.type
, tmp
->min
.value
- 1);
997 add_range(&ret
, gap_min
, sval
);
999 gap_min
= sval_type_val(tmp
->max
.type
, tmp
->max
.value
+ 1);
1000 if (sval_cmp(tmp
->max
, abs_max
) == 0)
1002 } END_FOR_EACH_PTR(tmp
);
1004 if (sval_cmp(gap_min
, abs_max
) < 0)
1005 add_range(&ret
, gap_min
, abs_max
);
1010 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
1012 struct data_range
*tmp
;
1014 FOR_EACH_PTR(filter
, tmp
) {
1015 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
1016 } END_FOR_EACH_PTR(tmp
);
1021 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
1025 two
= rl_invert(two
);
1026 return rl_filter(one
, two
);
/* Release the list backbone (the range nodes are pool-allocated). */
void free_rl(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}
1034 static void free_single_dinfo(struct data_info
*dinfo
)
1036 free_rl(&dinfo
->value_ranges
);
1039 static void free_dinfos(struct allocation_blob
*blob
)
1041 unsigned int size
= sizeof(struct data_info
);
1042 unsigned int offset
= 0;
1044 while (offset
< blob
->offset
) {
1045 free_single_dinfo((struct data_info
*)(blob
->data
+ offset
));
1050 void free_data_info_allocs(void)
1052 struct allocator_struct
*desc
= &data_info_allocator
;
1053 struct allocation_blob
*blob
= desc
->blobs
;
1056 desc
->allocations
= 0;
1057 desc
->total_bytes
= 0;
1058 desc
->useful_bytes
= 0;
1059 desc
->freelist
= NULL
;
1061 struct allocation_blob
*next
= blob
->next
;
1063 blob_free(blob
, desc
->chunking
);
1066 clear_data_range_alloc();