/*
 * sparse/smatch_ranges.c
 *
 * Copyright (C) 2009 Dan Carpenter.
 *
 * Licensed under the Open Software License version 1.1
 *
 */

#include "parse.h"
#include "smatch.h"
#include "smatch_extra.h"
#include "smatch_slist.h"

ALLOCATOR(data_info, "smatch extra data");
ALLOCATOR(data_range, "data range");
__DO_ALLOCATOR(struct data_range, sizeof(struct data_range), __alignof__(struct data_range),
               "permanent ranges", perm_data_range);

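/*
 * A range_list is a sorted list of data_range structs, each holding an
 * inclusive min-max pair.  add_range() below keeps the entries
 * non-overlapping and merges neighbours, so "1-3" plus "4-5" is stored
 * as the single range "1-5".  show_rl() prints a list in that same
 * "0,5-9" style for debug output.
 */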
char *show_rl(struct range_list *list)
{
        struct data_range *tmp;
        char full[256];
        int i = 0;

        full[0] = '\0';
        FOR_EACH_PTR(list, tmp) {
                if (i++)
                        strncat(full, ",", 254 - strlen(full));
                if (sval_cmp(tmp->min, tmp->max) == 0) {
                        strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
                        continue;
                }
                strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
                strncat(full, "-", 254 - strlen(full));
                strncat(full, sval_to_str(tmp->max), 254 - strlen(full));
        } END_FOR_EACH_PTR(tmp);
        return alloc_sname(full);
}

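/*
 * parse_val() reads one value from the front of a string.  It accepts
 * the special tokens "min" and "max" plus the sized variants like
 * "s32min" or "u64max"; anything else is parsed as a base 10 number
 * with strtoll().
 */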
static sval_t parse_val(struct symbol *type, char *c, char **endp)
{
        char *start = c;
        sval_t ret;

        if (!strncmp(start, "max", 3)) {
                ret = sval_type_max(type);
                c += 3;
        } else if (!strncmp(start, "u64max", 6)) {
                ret = sval_type_val(type, ULLONG_MAX);
                c += 6;
        } else if (!strncmp(start, "s64max", 6)) {
                ret = sval_type_val(type, LLONG_MAX);
                c += 6;
        } else if (!strncmp(start, "u32max", 6)) {
                ret = sval_type_val(type, UINT_MAX);
                c += 6;
        } else if (!strncmp(start, "s32max", 6)) {
                ret = sval_type_val(type, INT_MAX);
                c += 6;
        } else if (!strncmp(start, "u16max", 6)) {
                ret = sval_type_val(type, USHRT_MAX);
                c += 6;
        } else if (!strncmp(start, "s16max", 6)) {
                ret = sval_type_val(type, SHRT_MAX);
                c += 6;
        } else if (!strncmp(start, "min", 3)) {
                ret = sval_type_min(type);
                c += 3;
        } else if (!strncmp(start, "s64min", 6)) {
                ret = sval_type_val(type, LLONG_MIN);
                c += 6;
        } else if (!strncmp(start, "s32min", 6)) {
                ret = sval_type_val(type, INT_MIN);
                c += 6;
        } else if (!strncmp(start, "s16min", 6)) {
                ret = sval_type_val(type, SHRT_MIN);
                c += 6;
        } else {
                ret = sval_type_val(type, strtoll(start, &c, 10));
        }
        *endp = c;
        return ret;
}

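/*
 * str_to_rl() parses a printed range list such as "0-5" or
 * "4,8,12-s32max" back into a struct range_list: values are separated
 * by commas and a '-' between two values makes a range.
 */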
void str_to_rl(struct symbol *type, char *value, struct range_list **rl)
{
        sval_t min, max;
        char *c;

        *rl = NULL;
        if (value && strcmp(value, "empty") == 0)
                return;

        c = value;
        while (*c) {
                min = parse_val(type, c, &c);
                if (!*c) {
                        add_range(rl, min, min);
                        break;
                }
                if (*c == ',') {
                        add_range(rl, min, min);
                        c++;
                        continue;
                }
                if (*c != '-') {
                        sm_msg("debug XXX: trouble parsing %s ", value);
                        break;
                }
                c++;
                max = parse_val(type, c, &c);
                add_range(rl, min, max);
                if (!*c)
                        break;
                if (*c != ',') {
                        sm_msg("debug YYY: trouble parsing %s %s", value, c);
                        break;
                }
                c++;
        }

        *rl = cast_rl(type, *rl);
}

int is_whole_rl(struct range_list *rl)
{
        struct data_range *drange;

        if (ptr_list_empty(rl))
                return 0;
        drange = first_ptr_list((struct ptr_list *)rl);
        if (sval_is_min(drange->min) && sval_is_max(drange->max))
                return 1;
        return 0;
}

sval_t rl_min(struct range_list *rl)
{
        struct data_range *drange;
        sval_t ret;

        ret.type = &llong_ctype;
        ret.value = LLONG_MIN;
        if (ptr_list_empty(rl))
                return ret;
        drange = first_ptr_list((struct ptr_list *)rl);
        return drange->min;
}

sval_t rl_max(struct range_list *rl)
{
        struct data_range *drange;
        sval_t ret;

        ret.type = &llong_ctype;
        ret.value = LLONG_MAX;
        if (ptr_list_empty(rl))
                return ret;
        drange = last_ptr_list((struct ptr_list *)rl);
        return drange->max;
}

struct symbol *rl_type(struct range_list *rl)
{
        if (!rl)
                return NULL;
        return rl_min(rl).type;
}

static struct data_range *alloc_range_helper_sval(sval_t min, sval_t max, int perm)
{
        struct data_range *ret;

        if (perm)
                ret = __alloc_perm_data_range(0);
        else
                ret = __alloc_data_range(0);
        ret->min = min;
        ret->max = max;
        return ret;
}

struct data_range *alloc_range(sval_t min, sval_t max)
{
        return alloc_range_helper_sval(min, max, 0);
}

struct data_range *alloc_range_perm(sval_t min, sval_t max)
{
        return alloc_range_helper_sval(min, max, 1);
}

struct range_list *alloc_rl(sval_t min, sval_t max)
{
        struct range_list *rl = NULL;

        if (sval_cmp(min, max) > 0)
                return alloc_whole_rl(min.type);

        add_range(&rl, min, max);
        return rl;
}

struct range_list *alloc_whole_rl(struct symbol *type)
{
        if (!type || type_positive_bits(type) < 0)
                type = &llong_ctype;

        return alloc_rl(sval_type_min(type), sval_type_max(type));
}

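/*
 * add_range() inserts min-max into a sorted range_list, merging it with
 * any ranges it touches or overlaps.  For example, adding 4-7 to
 * "0-5,9" gives "0-7,9", and adding 8 to that gives "0-9".
 */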
void add_range(struct range_list **list, sval_t min, sval_t max)
{
        struct data_range *tmp = NULL;
        struct data_range *new = NULL;
        int check_next = 0;

        /*
         * FIXME: This has a problem merging a range_list like: min-0,3-max
         * with a range like 1-2.  You end up with min-2,3-max instead of
         * just min-max.
         */
        FOR_EACH_PTR(*list, tmp) {
                if (check_next) {
                        /* Sometimes we overlap with more than one range
                           so we have to delete or modify the next range. */
                        if (max.value + 1 == tmp->min.value) {
                                /* join 2 ranges here */
                                new->max = tmp->max;
                                DELETE_CURRENT_PTR(tmp);
                                return;
                        }

                        /* Doesn't overlap with the next one. */
                        if (sval_cmp(max, tmp->min) < 0)
                                return;
                        /* Partially overlaps with the next one. */
                        if (sval_cmp(max, tmp->max) < 0) {
                                tmp->min.value = max.value + 1;
                                return;
                        }
                        /* Completely overlaps with the next one. */
                        if (sval_cmp(max, tmp->max) >= 0) {
                                DELETE_CURRENT_PTR(tmp);
                                /* there could be more ranges to delete */
                                continue;
                        }
                }
                if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
                        /* join 2 ranges into a big range */
                        new = alloc_range(min, tmp->max);
                        REPLACE_CURRENT_PTR(tmp, new);
                        return;
                }
                if (sval_cmp(max, tmp->min) < 0) {  /* new range entirely below */
                        new = alloc_range(min, max);
                        INSERT_CURRENT(new, tmp);
                        return;
                }
                if (sval_cmp(min, tmp->min) < 0) {  /* new range partially below */
                        if (sval_cmp(max, tmp->max) < 0)
                                max = tmp->max;
                        else
                                check_next = 1;
                        new = alloc_range(min, max);
                        REPLACE_CURRENT_PTR(tmp, new);
                        if (!check_next)
                                return;
                        continue;
                }
                if (sval_cmp(max, tmp->max) <= 0)  /* new range already included */
                        return;
                if (sval_cmp(min, tmp->max) <= 0) {  /* new range partially above */
                        min = tmp->min;
                        new = alloc_range(min, max);
                        REPLACE_CURRENT_PTR(tmp, new);
                        check_next = 1;
                        continue;
                }
                if (!sval_is_min(min) && min.value - 1 == tmp->max.value) {
                        /* join 2 ranges into a big range */
                        new = alloc_range(tmp->min, max);
                        REPLACE_CURRENT_PTR(tmp, new);
                        check_next = 1;
                        continue;
                }
                /* the new range is entirely above the existing ranges */
        } END_FOR_EACH_PTR(tmp);
        if (check_next)
                return;
        new = alloc_range(min, max);
        add_ptr_list(list, new);
}

struct range_list *clone_rl(struct range_list *list)
{
        struct data_range *tmp;
        struct range_list *ret = NULL;

        FOR_EACH_PTR(list, tmp) {
                add_ptr_list(&ret, tmp);
        } END_FOR_EACH_PTR(tmp);
        return ret;
}

struct range_list *clone_rl_permanent(struct range_list *list)
{
        struct data_range *tmp;
        struct data_range *new;
        struct range_list *ret = NULL;

        FOR_EACH_PTR(list, tmp) {
                new = alloc_range_perm(tmp->min, tmp->max);
                add_ptr_list(&ret, new);
        } END_FOR_EACH_PTR(tmp);
        return ret;
}

struct range_list *rl_union(struct range_list *one, struct range_list *two)
{
        struct data_range *tmp;
        struct range_list *ret = NULL;

        FOR_EACH_PTR(one, tmp) {
                add_range(&ret, tmp->min, tmp->max);
        } END_FOR_EACH_PTR(tmp);
        FOR_EACH_PTR(two, tmp) {
                add_range(&ret, tmp->min, tmp->max);
        } END_FOR_EACH_PTR(tmp);
        return ret;
}

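/*
 * remove_range() returns a copy of the list with min-max cut out.
 * Ranges that straddle the cut are trimmed or split, so removing 5
 * from "0-9" gives "0-4,6-9".
 */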
struct range_list *remove_range(struct range_list *list, sval_t min, sval_t max)
{
        struct data_range *tmp;
        struct range_list *ret = NULL;

        FOR_EACH_PTR(list, tmp) {
                if (sval_cmp(tmp->max, min) < 0) {
                        add_range(&ret, tmp->min, tmp->max);
                        continue;
                }
                if (sval_cmp(tmp->min, max) > 0) {
                        add_range(&ret, tmp->min, tmp->max);
                        continue;
                }
                if (sval_cmp(tmp->min, min) >= 0 && sval_cmp(tmp->max, max) <= 0)
                        continue;
                if (sval_cmp(tmp->min, min) >= 0) {
                        max.value++;
                        add_range(&ret, max, tmp->max);
                } else if (sval_cmp(tmp->max, max) <= 0) {
                        min.value--;
                        add_range(&ret, tmp->min, min);
                } else {
                        min.value--;
                        max.value++;
                        add_range(&ret, tmp->min, min);
                        add_range(&ret, max, tmp->max);
                }
        } END_FOR_EACH_PTR(tmp);
        return ret;
}

int ranges_equiv(struct data_range *one, struct data_range *two)
{
        if (!one && !two)
                return 1;
        if (!one || !two)
                return 0;
        if (sval_cmp(one->min, two->min) != 0)
                return 0;
        if (sval_cmp(one->max, two->max) != 0)
                return 0;
        return 1;
}

int rl_equiv(struct range_list *one, struct range_list *two)
{
        struct data_range *one_range;
        struct data_range *two_range;

        if (one == two)
                return 1;

        PREPARE_PTR_LIST(one, one_range);
        PREPARE_PTR_LIST(two, two_range);
        for (;;) {
                if (!one_range && !two_range)
                        return 1;
                if (!ranges_equiv(one_range, two_range))
                        return 0;
                NEXT_PTR_LIST(one_range);
                NEXT_PTR_LIST(two_range);
        }
        FINISH_PTR_LIST(two_range);
        FINISH_PTR_LIST(one_range);

        return 1;
}

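/*
 * The comparison helpers below answer "could this comparison be true?"
 * for a single pair of ranges.  Eg. for 0-5 < 4-9 the true version
 * returns 1 because 0 < 4 is possible, and the false version also
 * returns 1 because 5 < 4 is not.
 */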
int true_comparison_range(struct data_range *left, int comparison, struct data_range *right)
{
        switch (comparison) {
        case '<':
        case SPECIAL_UNSIGNED_LT:
                if (sval_cmp(left->min, right->max) < 0)
                        return 1;
                return 0;
        case SPECIAL_UNSIGNED_LTE:
        case SPECIAL_LTE:
                if (sval_cmp(left->min, right->max) <= 0)
                        return 1;
                return 0;
        case SPECIAL_EQUAL:
                if (sval_cmp(left->max, right->min) < 0)
                        return 0;
                if (sval_cmp(left->min, right->max) > 0)
                        return 0;
                return 1;
        case SPECIAL_UNSIGNED_GTE:
        case SPECIAL_GTE:
                if (sval_cmp(left->max, right->min) >= 0)
                        return 1;
                return 0;
        case '>':
        case SPECIAL_UNSIGNED_GT:
                if (sval_cmp(left->max, right->min) > 0)
                        return 1;
                return 0;
        case SPECIAL_NOTEQUAL:
                if (sval_cmp(left->min, left->max) != 0)
                        return 1;
                if (sval_cmp(right->min, right->max) != 0)
                        return 1;
                if (sval_cmp(left->min, right->min) != 0)
                        return 1;
                return 0;
        default:
                sm_msg("unhandled comparison %d\n", comparison);
                return 0;
        }
        return 0;
}

int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
        if (left)
                return true_comparison_range(var, comparison, val);
        else
                return true_comparison_range(val, comparison, var);
}

static int false_comparison_range_sval(struct data_range *left, int comparison, struct data_range *right)
{
        switch (comparison) {
        case '<':
        case SPECIAL_UNSIGNED_LT:
                if (sval_cmp(left->max, right->min) >= 0)
                        return 1;
                return 0;
        case SPECIAL_UNSIGNED_LTE:
        case SPECIAL_LTE:
                if (sval_cmp(left->max, right->min) > 0)
                        return 1;
                return 0;
        case SPECIAL_EQUAL:
                if (sval_cmp(left->min, left->max) != 0)
                        return 1;
                if (sval_cmp(right->min, right->max) != 0)
                        return 1;
                if (sval_cmp(left->min, right->min) != 0)
                        return 1;
                return 0;
        case SPECIAL_UNSIGNED_GTE:
        case SPECIAL_GTE:
                if (sval_cmp(left->min, right->max) < 0)
                        return 1;
                return 0;
        case '>':
        case SPECIAL_UNSIGNED_GT:
                if (sval_cmp(left->min, right->max) <= 0)
                        return 1;
                return 0;
        case SPECIAL_NOTEQUAL:
                if (sval_cmp(left->max, right->min) < 0)
                        return 0;
                if (sval_cmp(left->min, right->max) > 0)
                        return 0;
                return 1;
        default:
                sm_msg("unhandled comparison %d\n", comparison);
                return 0;
        }
        return 0;
}

int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
        if (left)
                return false_comparison_range_sval(var, comparison, val);
        else
                return false_comparison_range_sval(val, comparison, var);
}

int possibly_true(struct expression *left, int comparison, struct expression *right)
{
        struct range_list *rl_left, *rl_right;
        struct data_range *tmp_left, *tmp_right;

        if (!get_implied_rl(left, &rl_left))
                return 1;
        if (!get_implied_rl(right, &rl_right))
                return 1;

        FOR_EACH_PTR(rl_left, tmp_left) {
                FOR_EACH_PTR(rl_right, tmp_right) {
                        if (true_comparison_range(tmp_left, comparison, tmp_right))
                                return 1;
                } END_FOR_EACH_PTR(tmp_right);
        } END_FOR_EACH_PTR(tmp_left);
        return 0;
}

int possibly_false(struct expression *left, int comparison, struct expression *right)
{
        struct range_list *rl_left, *rl_right;
        struct data_range *tmp_left, *tmp_right;

        if (!get_implied_rl(left, &rl_left))
                return 1;
        if (!get_implied_rl(right, &rl_right))
                return 1;

        FOR_EACH_PTR(rl_left, tmp_left) {
                FOR_EACH_PTR(rl_right, tmp_right) {
                        if (false_comparison_range_sval(tmp_left, comparison, tmp_right))
                                return 1;
                } END_FOR_EACH_PTR(tmp_right);
        } END_FOR_EACH_PTR(tmp_left);
        return 0;
}

int possibly_true_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
        struct data_range *left_tmp, *right_tmp;

        if (!left_ranges || !right_ranges)
                return 1;

        FOR_EACH_PTR(left_ranges, left_tmp) {
                FOR_EACH_PTR(right_ranges, right_tmp) {
                        if (true_comparison_range(left_tmp, comparison, right_tmp))
                                return 1;
                } END_FOR_EACH_PTR(right_tmp);
        } END_FOR_EACH_PTR(left_tmp);
        return 0;
}

int possibly_false_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
        struct data_range *left_tmp, *right_tmp;

        if (!left_ranges || !right_ranges)
                return 1;

        FOR_EACH_PTR(left_ranges, left_tmp) {
                FOR_EACH_PTR(right_ranges, right_tmp) {
                        if (false_comparison_range_sval(left_tmp, comparison, right_tmp))
                                return 1;
                } END_FOR_EACH_PTR(right_tmp);
        } END_FOR_EACH_PTR(left_tmp);
        return 0;
}

/* FIXME: the _rl here stands for right left so really it should be _lr */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
        if (left)
                return possibly_true_rl(a, comparison, b);
        else
                return possibly_true_rl(b, comparison, a);
}

int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
        if (left)
                return possibly_false_rl(a, comparison, b);
        else
                return possibly_false_rl(b, comparison, a);
}

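/*
 * The range_list_stack helpers below are thin wrappers around the
 * ptr_list primitives: push_rl() and pop_rl() treat the list as a
 * stack, and top_rl() peeks at the last entry without removing it.
 */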
void tack_on(struct range_list **list, struct data_range *drange)
{
        add_ptr_list(list, drange);
}

void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
        add_ptr_list(rl_stack, rl);
}

struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
        struct range_list *rl;

        rl = last_ptr_list((struct ptr_list *)*rl_stack);
        delete_ptr_list_last((struct ptr_list **)rl_stack);
        return rl;
}

struct range_list *top_rl(struct range_list_stack *rl_stack)
{
        struct range_list *rl;

        rl = last_ptr_list((struct ptr_list *)rl_stack);
        return rl;
}

void filter_top_rl(struct range_list_stack **rl_stack, sval_t sval)
{
        struct range_list *rl;

        rl = pop_rl(rl_stack);
        rl = remove_range(rl, sval, sval);
        push_rl(rl_stack, rl);
}

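/*
 * sval_too_big() checks whether a value needs more bits than the
 * target type has.  A minimal example, assuming type_bits() returns 32
 * for an int: 0x100000000 is too big for int but 0xffffffff still
 * fits in the unsigned representation.
 */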
static int sval_too_big(struct symbol *type, sval_t sval)
{
        if (type_bits(type) == 64)
                return 0;
        if (sval.uvalue > ((1ULL << type_bits(type)) - 1))
                return 1;
        return 0;
}

static void add_range_t(struct symbol *type, struct range_list **rl, sval_t min, sval_t max)
{
        /* If we're just adding a number, cast it and add it */
        if (sval_cmp(min, max) == 0) {
                add_range(rl, sval_cast(type, min), sval_cast(type, max));
                return;
        }

        /* If the range is within the type range then add it */
        if (sval_fits(type, min) && sval_fits(type, max)) {
                add_range(rl, sval_cast(type, min), sval_cast(type, max));
                return;
        }

        /*
         * If the range we are adding has more bits than the range type then
         * add the whole range type.  Eg:
         * 0x8000000000000000 - 0xf000000000000000 -> cast to int
         * This isn't totally the right thing to do.  We could be more granular.
         */
        if (sval_too_big(type, min) || sval_too_big(type, max)) {
                add_range(rl, sval_type_min(type), sval_type_max(type));
                return;
        }

        /* Cast negative values to high positive values */
        if (sval_is_negative(min) && type_unsigned(type)) {
                if (sval_is_positive(max)) {
                        if (sval_too_high(type, max)) {
                                add_range(rl, sval_type_min(type), sval_type_max(type));
                                return;
                        }
                        add_range(rl, sval_type_val(type, 0), sval_cast(type, max));
                        max = sval_type_max(type);
                } else {
                        max = sval_cast(type, max);
                }
                min = sval_cast(type, min);
                add_range(rl, min, max);
                return;
        }

        /* Cast high positive numbers to negative */
        if (sval_unsigned(max) && sval_is_negative(sval_cast(type, max))) {
                if (!sval_is_negative(sval_cast(type, min))) {
                        add_range(rl, sval_cast(type, min), sval_type_max(type));
                        min = sval_type_min(type);
                } else {
                        min = sval_cast(type, min);
                }
                max = sval_cast(type, max);
                add_range(rl, min, max);
                return;
        }
}

struct range_list *rl_truncate_cast(struct symbol *type, struct range_list *rl)
{
        struct data_range *tmp;
        struct range_list *ret = NULL;
        sval_t min, max;

        if (!rl)
                return NULL;

        if (!type || type == rl_type(rl))
                return rl;

        FOR_EACH_PTR(rl, tmp) {
                min = tmp->min;
                max = tmp->max;
                if (type_bits(type) < type_bits(rl_type(rl))) {
                        min.uvalue = tmp->min.uvalue & ((1ULL << type_bits(type)) - 1);
                        max.uvalue = tmp->max.uvalue & ((1ULL << type_bits(type)) - 1);
                }
                if (sval_cmp(min, max) > 0) {
                        min = sval_cast(type, min);
                        max = sval_cast(type, max);
                }
                add_range_t(type, &ret, min, max);
        } END_FOR_EACH_PTR(tmp);

        return ret;
}

static int rl_is_sane(struct range_list *rl)
{
        struct data_range *tmp;
        struct symbol *type;

        type = rl_type(rl);
        FOR_EACH_PTR(rl, tmp) {
                if (!sval_fits(type, tmp->min))
                        return 0;
                if (!sval_fits(type, tmp->max))
                        return 0;
                if (sval_cmp(tmp->min, tmp->max) > 0)
                        return 0;
        } END_FOR_EACH_PTR(tmp);

        return 1;
}

static int rl_type_consistent(struct range_list *rl)
{
        struct data_range *tmp;
        struct symbol *type;

        type = rl_type(rl);
        FOR_EACH_PTR(rl, tmp) {
                if (type != tmp->min.type || type != tmp->max.type)
                        return 0;
        } END_FOR_EACH_PTR(tmp);
        return 1;
}

struct range_list *cast_rl(struct symbol *type, struct range_list *rl)
{
        struct data_range *tmp;
        struct range_list *ret = NULL;

        if (!rl)
                return NULL;

        if (!type)
                return rl;
        if (!rl_is_sane(rl))
                return alloc_whole_rl(type);
        if (type == rl_type(rl) && rl_type_consistent(rl))
                return rl;

        FOR_EACH_PTR(rl, tmp) {
                add_range_t(type, &ret, tmp->min, tmp->max);
        } END_FOR_EACH_PTR(tmp);

        if (!ret)
                return alloc_whole_rl(type);

        return ret;
}

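/*
 * rl_invert() returns the complement of a range_list within its type.
 * For example, inverting the int list "0-4,7" gives the gaps: INT_MIN
 * to -1, 5-6, and 8 to INT_MAX.
 */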
struct range_list *rl_invert(struct range_list *orig)
{
        struct range_list *ret = NULL;
        struct data_range *tmp;
        sval_t gap_min, abs_max, sval;

        if (!orig)
                return NULL;

        gap_min = sval_type_min(rl_min(orig).type);
        abs_max = sval_type_max(rl_max(orig).type);

        FOR_EACH_PTR(orig, tmp) {
                if (sval_cmp(tmp->min, gap_min) > 0) {
                        sval = sval_type_val(tmp->min.type, tmp->min.value - 1);
                        add_range(&ret, gap_min, sval);
                }
                gap_min = sval_type_val(tmp->max.type, tmp->max.value + 1);
                if (sval_cmp(tmp->max, abs_max) == 0)
                        gap_min = abs_max;
        } END_FOR_EACH_PTR(tmp);

        if (sval_cmp(gap_min, abs_max) < 0)
                add_range(&ret, gap_min, abs_max);

        return ret;
}

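/*
 * rl_filter() removes every range in "filter" from "rl".
 * rl_intersection() builds on it: intersecting with "two" is the same
 * as filtering out the complement of "two".
 */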
struct range_list *rl_filter(struct range_list *rl, struct range_list *filter)
{
        struct data_range *tmp;

        FOR_EACH_PTR(filter, tmp) {
                rl = remove_range(rl, tmp->min, tmp->max);
        } END_FOR_EACH_PTR(tmp);

        return rl;
}

struct range_list *rl_intersection(struct range_list *one, struct range_list *two)
{
        if (!two)
                return NULL;
        two = rl_invert(two);
        return rl_filter(one, two);
}

void free_rl(struct range_list **rlist)
{
        __free_ptr_list((struct ptr_list **)rlist);
}

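/*
 * The cleanup helpers below walk the data_info allocator's blobs,
 * free the range lists each data_info owns, and then hand the blob
 * memory itself back.
 */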
static void free_single_dinfo(struct data_info *dinfo)
{
        free_rl(&dinfo->value_ranges);
}

static void free_dinfos(struct allocation_blob *blob)
{
        unsigned int size = sizeof(struct data_info);
        unsigned int offset = 0;

        while (offset < blob->offset) {
                free_single_dinfo((struct data_info *)(blob->data + offset));
                offset += size;
        }
}

void free_data_info_allocs(void)
{
        struct allocator_struct *desc = &data_info_allocator;
        struct allocation_blob *blob = desc->blobs;

        desc->blobs = NULL;
        desc->allocations = 0;
        desc->total_bytes = 0;
        desc->useful_bytes = 0;
        desc->freelist = NULL;
        while (blob) {
                struct allocation_blob *next = blob->next;
                free_dinfos(blob);
                blob_free(blob, desc->chunking);
                blob = next;
        }
        clear_data_range_alloc();
}