2 * sparse/smatch_ranges.c
4 * Copyright (C) 2009 Dan Carpenter.
6 * Licensed under the Open Software License version 1.1
12 #include "smatch_extra.h"
13 #include "smatch_slist.h"
/* Allocators for the range-list machinery.  data_info and data_range get
 * ordinary per-pass allocators; a separate "permanent ranges" allocator
 * (perm_data_range) holds ranges that must outlive the usual free cycle. */
15 ALLOCATOR(data_info
, "smatch extra data");
16 ALLOCATOR(data_range
, "data range");
17 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
18 "permanent ranges", perm_data_range
);
/* show_rl() - render a range_list as a human-readable string such as
 * "0,5-10".  Single-value ranges print one value; wider ranges print
 * "min-max"; entries are comma separated.  The result is returned via
 * alloc_sname().
 * NOTE(review): the "254 - strlen(full)" bound passed to strncat() can
 * underflow (wrap to a huge size_t) if the buffer ever fills past 254
 * bytes -- confirm that callers keep lists short, or bound-check first.
 * (The declaration of `full` and some control flow are elided from this
 * view.) */
20 char *show_rl(struct range_list
*list
)
22 struct data_range
*tmp
;
28 FOR_EACH_PTR(list
, tmp
) {
/* comma separator between ranges */
30 strncat(full
, ",", 254 - strlen(full
));
/* a degenerate range (min == max) prints as a single value */
31 if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
32 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
/* otherwise print "min-max" */
35 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
36 strncat(full
, "-", 254 - strlen(full
));
37 strncat(full
, sval_to_str(tmp
->max
), 254 - strlen(full
));
38 } END_FOR_EACH_PTR(tmp
);
39 return alloc_sname(full
);
/* parse_val() - parse one value token when reading a range string.
 * Recognizes the symbolic limit names ("max", "u64max", "s64max",
 * "u32max", "s32max", "u16max", "s16max", "min", "s64min", "s32min",
 * "s16min") and maps them onto the given type; anything else falls
 * through to strtoll() in base 10.  *endp is advanced past the consumed
 * text (the advancing code for the symbolic branches is elided here). */
42 static sval_t
parse_val(struct symbol
*type
, char *c
, char **endp
)
47 if (!strncmp(start
, "max", 3)) {
48 ret
= sval_type_max(type
);
50 } else if (!strncmp(start
, "u64max", 6)) {
51 ret
= sval_type_val(type
, ULLONG_MAX
);
53 } else if (!strncmp(start
, "s64max", 6)) {
54 ret
= sval_type_val(type
, LLONG_MAX
);
56 } else if (!strncmp(start
, "u32max", 6)) {
57 ret
= sval_type_val(type
, UINT_MAX
);
59 } else if (!strncmp(start
, "s32max", 6)) {
60 ret
= sval_type_val(type
, INT_MAX
);
62 } else if (!strncmp(start
, "u16max", 6)) {
63 ret
= sval_type_val(type
, USHRT_MAX
);
65 } else if (!strncmp(start
, "s16max", 6)) {
66 ret
= sval_type_val(type
, SHRT_MAX
);
68 } else if (!strncmp(start
, "min", 3)) {
69 ret
= sval_type_min(type
);
71 } else if (!strncmp(start
, "s64min", 6)) {
72 ret
= sval_type_val(type
, LLONG_MIN
);
74 } else if (!strncmp(start
, "s32min", 6)) {
75 ret
= sval_type_val(type
, INT_MIN
);
77 } else if (!strncmp(start
, "s16min", 6)) {
78 ret
= sval_type_val(type
, SHRT_MIN
);
/* fallback: plain decimal number; strtoll advances c past the digits */
81 ret
= sval_type_val(type
, strtoll(start
, &c
, 10));
/* str_to_rl() - parse a textual range description (e.g. "0-5,7,min-max")
 * into a range_list of the given type.  "empty" yields no ranges.  Parse
 * failures are reported with sm_msg() debug output.  The final list is
 * normalized through cast_rl().  (Loop structure and the separator
 * handling between these statements are elided from this view.) */
87 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
96 if (value
&& strcmp(value
, "empty") == 0)
/* parse the low end of a range (or a lone value) */
103 min
= parse_val(type
, c
, &c
);
/* single value: add a degenerate min-min range */
107 add_range(rl
, min
, min
);
111 add_range(rl
, min
, min
);
116 sm_msg("debug XXX: trouble parsing %s ", value
);
/* parse the high end after a '-' and add the full span */
122 max
= parse_val(type
, c
, &c
);
123 add_range(rl
, min
, max
);
129 sm_msg("debug YYY: trouble parsing %s %s", value
, c
);
/* normalize everything to the requested type */
135 *rl
= cast_rl(type
, *rl
);
/* is_whole_rl() - true when the list's first range already spans the
 * whole type (min..max), i.e. the list carries no information. */
138 int is_whole_rl(struct range_list
*rl
)
140 struct data_range
*drange
;
142 if (ptr_list_empty(rl
))
144 drange
= first_ptr_list((struct ptr_list
*)rl
);
145 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
/* rl_min() - smallest value in the list.  An empty list yields the
 * sentinel LLONG_MIN typed as llong_ctype.  Relies on the list being
 * kept sorted, so the first range holds the minimum. */
150 sval_t
rl_min(struct range_list
*rl
)
152 struct data_range
*drange
;
155 ret
.type
= &llong_ctype
;
156 ret
.value
= LLONG_MIN
;
157 if (ptr_list_empty(rl
))
159 drange
= first_ptr_list((struct ptr_list
*)rl
);
/* rl_max() - largest value in the list; mirrors rl_min() but uses the
 * LLONG_MAX sentinel and the last range in the sorted list. */
163 sval_t
rl_max(struct range_list
*rl
)
165 struct data_range
*drange
;
168 ret
.type
= &llong_ctype
;
169 ret
.value
= LLONG_MAX
;
170 if (ptr_list_empty(rl
))
172 drange
= last_ptr_list((struct ptr_list
*)rl
);
/* alloc_range_helper_sval() - allocate a data_range from either the
 * normal allocator or, when perm is set, the permanent allocator.
 * (Assignment of min/max into the new range is elided from this view.) */
176 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
178 struct data_range
*ret
;
181 ret
= __alloc_perm_data_range(0);
183 ret
= __alloc_data_range(0);
/* alloc_range() - ordinary (non-permanent) data_range. */
189 struct data_range
*alloc_range(sval_t min
, sval_t max
)
191 return alloc_range_helper_sval(min
, max
, 0);
/* alloc_range_perm() - data_range from the permanent allocator. */
194 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
196 return alloc_range_helper_sval(min
, max
, 1);
/* alloc_rl() - build a one-range list.  An inverted pair (min > max) is
 * treated as "unknown" and returns the whole range of min's type. */
199 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
201 struct range_list
*rl
= NULL
;
203 if (sval_cmp(min
, max
) > 0)
204 return alloc_whole_rl(min
.type
);
206 add_range(&rl
, min
, max
);
/* alloc_whole_rl() - the full min..max span of a type.  Bails for a
 * missing type or one reporting negative positive-bits. */
210 struct range_list
*alloc_whole_rl(struct symbol
*type
)
212 if (!type
|| type_positive_bits(type
) < 0)
215 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
/* add_range() - insert [min,max] into a sorted range_list, merging with
 * any ranges it touches or overlaps.  Adjacent ranges (max+1 == next min,
 * or min-1 == previous max) are coalesced into one.  If the new range
 * overlaps nothing it is inserted at the right position, or appended at
 * the end.  (Early-return/continue statements between the visible
 * comparisons are elided from this view.) */
218 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
220 struct data_range
*tmp
= NULL
;
221 struct data_range
*new = NULL
;
225 * FIXME: This has a problem merging a range_list like: min-0,3-max
226 * with a range like 1-2. You end up with min-2,3-max instead of
229 FOR_EACH_PTR(*list
, tmp
) {
231 /* Sometimes we overlap with more than one range
232 so we have to delete or modify the next range. */
233 if (max
.value
+ 1 == tmp
->min
.value
) {
234 /* join 2 ranges here */
236 DELETE_CURRENT_PTR(tmp
);
240 /* Doesn't overlap with the next one. */
241 if (sval_cmp(max
, tmp
->min
) < 0)
243 /* Partially overlaps with the next one. */
244 if (sval_cmp(max
, tmp
->max
) < 0) {
245 tmp
->min
.value
= max
.value
+ 1;
248 /* Completely overlaps with the next one. */
249 if (sval_cmp(max
, tmp
->max
) >= 0) {
250 DELETE_CURRENT_PTR(tmp
);
251 /* there could be more ranges to delete */
/* new range is adjacent below tmp: extend to cover both */
255 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
256 /* join 2 ranges into a big range */
257 new = alloc_range(min
, tmp
->max
);
258 REPLACE_CURRENT_PTR(tmp
, new);
261 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
262 new = alloc_range(min
, max
);
263 INSERT_CURRENT(new, tmp
);
266 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
267 if (sval_cmp(max
, tmp
->max
) < 0)
271 new = alloc_range(min
, max
);
272 REPLACE_CURRENT_PTR(tmp
, new);
277 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
279 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
281 new = alloc_range(min
, max
);
282 REPLACE_CURRENT_PTR(tmp
, new);
/* new range is adjacent above tmp: merge upward */
286 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
287 /* join 2 ranges into a big range */
288 new = alloc_range(tmp
->min
, max
);
289 REPLACE_CURRENT_PTR(tmp
, new);
293 /* the new range is entirely above the existing ranges */
294 } END_FOR_EACH_PTR(tmp
);
/* nothing merged/inserted: append at the tail */
297 new = alloc_range(min
, max
);
298 add_ptr_list(list
, new);
/* clone_rl() - shallow copy: a new list sharing the same data_range
 * nodes as the original. */
301 struct range_list
*clone_rl(struct range_list
*list
)
303 struct data_range
*tmp
;
304 struct range_list
*ret
= NULL
;
306 FOR_EACH_PTR(list
, tmp
) {
307 add_ptr_list(&ret
, tmp
);
308 } END_FOR_EACH_PTR(tmp
);
/* clone_rl_permanent() - deep copy into permanently-allocated ranges so
 * the clone survives the per-function allocator free cycle. */
312 struct range_list
*clone_rl_permanent(struct range_list
*list
)
314 struct data_range
*tmp
;
315 struct data_range
*new;
316 struct range_list
*ret
= NULL
;
318 FOR_EACH_PTR(list
, tmp
) {
319 new = alloc_range_perm(tmp
->min
, tmp
->max
);
320 add_ptr_list(&ret
, new);
321 } END_FOR_EACH_PTR(tmp
);
/* rl_union() - merge two lists by add_range()-ing every range from both
 * into a fresh list; add_range() handles overlap coalescing. */
325 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
327 struct data_range
*tmp
;
328 struct range_list
*ret
= NULL
;
330 FOR_EACH_PTR(one
, tmp
) {
331 add_range(&ret
, tmp
->min
, tmp
->max
);
332 } END_FOR_EACH_PTR(tmp
);
333 FOR_EACH_PTR(two
, tmp
) {
334 add_range(&ret
, tmp
->min
, tmp
->max
);
335 } END_FOR_EACH_PTR(tmp
);
/* remove_range() - return a new list equal to `list` minus the interval
 * [min,max].  Ranges entirely below or above the cut are copied intact;
 * ranges fully inside it are dropped; partial overlaps keep only the
 * surviving side; a range that straddles the cut is split in two.
 * NOTE(review): the split/trim branches pass `min`/`max` straight to
 * add_range(); the +1/-1 endpoint adjustments are on lines elided from
 * this view -- confirm against the full source. */
339 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
341 struct data_range
*tmp
;
342 struct range_list
*ret
= NULL
;
344 FOR_EACH_PTR(list
, tmp
) {
/* entirely below the removed interval: keep */
345 if (sval_cmp(tmp
->max
, min
) < 0) {
346 add_range(&ret
, tmp
->min
, tmp
->max
);
/* entirely above the removed interval: keep */
349 if (sval_cmp(tmp
->min
, max
) > 0) {
350 add_range(&ret
, tmp
->min
, tmp
->max
);
/* fully covered by the removed interval: drop */
353 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
/* low side clipped: keep the upper remainder */
355 if (sval_cmp(tmp
->min
, min
) >= 0) {
357 add_range(&ret
, max
, tmp
->max
);
/* high side clipped: keep the lower remainder */
358 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
360 add_range(&ret
, tmp
->min
, min
);
/* straddles the cut: keep both remainders */
364 add_range(&ret
, tmp
->min
, min
);
365 add_range(&ret
, max
, tmp
->max
);
367 } END_FOR_EACH_PTR(tmp
);
/* ranges_equiv() - two data_ranges are equivalent when both min and max
 * compare equal.  (NULL-pointer pre-checks are elided from this view.) */
371 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
377 if (sval_cmp(one
->min
, two
->min
) != 0)
379 if (sval_cmp(one
->max
, two
->max
) != 0)
/* rl_equiv() - two range_lists are equivalent when they have the same
 * length and pairwise-equivalent ranges; walked in lockstep with the
 * PREPARE/NEXT/FINISH_PTR_LIST iterator protocol. */
384 int rl_equiv(struct range_list
*one
, struct range_list
*two
)
386 struct data_range
*one_range
;
387 struct data_range
*two_range
;
392 PREPARE_PTR_LIST(one
, one_range
);
393 PREPARE_PTR_LIST(two
, two_range
);
/* both exhausted simultaneously: lists match */
395 if (!one_range
&& !two_range
)
397 if (!ranges_equiv(one_range
, two_range
))
399 NEXT_PTR_LIST(one_range
);
400 NEXT_PTR_LIST(two_range
);
402 FINISH_PTR_LIST(two_range
);
403 FINISH_PTR_LIST(one_range
);
/* true_comparison_range() - can `left <comparison> right` possibly be
 * true for some pair of values drawn from the two ranges?  Each case
 * tests the most favorable endpoints (e.g. "<" is possible when
 * left->min < right->max).  For "!=" it is possible unless both ranges
 * are the same single value.  Unknown comparison ops are logged via
 * sm_msg().  (case labels for the signed variants and the return
 * statements are elided from this view.) */
408 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
410 switch (comparison
) {
412 case SPECIAL_UNSIGNED_LT
:
413 if (sval_cmp(left
->min
, right
->max
) < 0)
416 case SPECIAL_UNSIGNED_LTE
:
418 if (sval_cmp(left
->min
, right
->max
) <= 0)
/* equality is possible when the ranges overlap at all */
422 if (sval_cmp(left
->max
, right
->min
) < 0)
424 if (sval_cmp(left
->min
, right
->max
) > 0)
427 case SPECIAL_UNSIGNED_GTE
:
429 if (sval_cmp(left
->max
, right
->min
) >= 0)
433 case SPECIAL_UNSIGNED_GT
:
434 if (sval_cmp(left
->max
, right
->min
) > 0)
437 case SPECIAL_NOTEQUAL
:
438 if (sval_cmp(left
->min
, left
->max
) != 0)
440 if (sval_cmp(right
->min
, right
->max
) != 0)
442 if (sval_cmp(left
->min
, right
->min
) != 0)
446 sm_msg("unhandled comparison %d\n", comparison
);
/* true_comparison_range_LR() - dispatch with the variable on the left
 * or right of the comparison depending on the `left` flag. */
452 int true_comparison_range_LR(int comparison
, struct data_range
*var
, struct data_range
*val
, int left
)
455 return true_comparison_range(var
, comparison
, val
);
457 return true_comparison_range(val
, comparison
, var
);
/* false_comparison_range_sval() - can `left <comparison> right` possibly
 * be FALSE for some pair of values from the two ranges?  Dual of
 * true_comparison_range(): each case checks the endpoints that would
 * allow the comparison to fail (e.g. "<" can be false when
 * left->max >= right->min).  "==" can be false unless both ranges are
 * the same single value; "!=" can be false only when the ranges overlap.
 * (case labels for the signed variants and the returns are elided from
 * this view.) */
460 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
462 switch (comparison
) {
464 case SPECIAL_UNSIGNED_LT
:
465 if (sval_cmp(left
->max
, right
->min
) >= 0)
468 case SPECIAL_UNSIGNED_LTE
:
470 if (sval_cmp(left
->max
, right
->min
) > 0)
/* "==" is certainly true only for two identical single values */
474 if (sval_cmp(left
->min
, left
->max
) != 0)
476 if (sval_cmp(right
->min
, right
->max
) != 0)
478 if (sval_cmp(left
->min
, right
->min
) != 0)
481 case SPECIAL_UNSIGNED_GTE
:
483 if (sval_cmp(left
->min
, right
->max
) < 0)
487 case SPECIAL_UNSIGNED_GT
:
488 if (sval_cmp(left
->min
, right
->max
) <= 0)
491 case SPECIAL_NOTEQUAL
:
492 if (sval_cmp(left
->max
, right
->min
) < 0)
494 if (sval_cmp(left
->min
, right
->max
) > 0)
498 sm_msg("unhandled comparison %d\n", comparison
);
/* false_comparison_range_LR() - dispatch with the variable on the left
 * or right side depending on the `left` flag. */
504 int false_comparison_range_LR(int comparison
, struct data_range
*var
, struct data_range
*val
, int left
)
507 return false_comparison_range_sval(var
, comparison
, val
);
509 return false_comparison_range_sval(val
, comparison
, var
);
/* possibly_true() - could the comparison of two expressions be true?
 * Pulls the implied range list of each side and tests every pair of
 * ranges; conservatively true when either side has no implied range.
 * (returns inside and after the loops are elided from this view.) */
512 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
514 struct range_list
*rl_left
, *rl_right
;
515 struct data_range
*tmp_left
, *tmp_right
;
517 if (!get_implied_rl(left
, &rl_left
))
519 if (!get_implied_rl(right
, &rl_right
))
522 FOR_EACH_PTR(rl_left
, tmp_left
) {
523 FOR_EACH_PTR(rl_right
, tmp_right
) {
524 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
526 } END_FOR_EACH_PTR(tmp_right
);
527 } END_FOR_EACH_PTR(tmp_left
);
/* possibly_false() - dual of possibly_true(): could the comparison of
 * two expressions be false?  Same pairwise scan, using the
 * false_comparison predicate. */
531 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
533 struct range_list
*rl_left
, *rl_right
;
534 struct data_range
*tmp_left
, *tmp_right
;
536 if (!get_implied_rl(left
, &rl_left
))
538 if (!get_implied_rl(right
, &rl_right
))
541 FOR_EACH_PTR(rl_left
, tmp_left
) {
542 FOR_EACH_PTR(rl_right
, tmp_right
) {
543 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
545 } END_FOR_EACH_PTR(tmp_right
);
546 } END_FOR_EACH_PTR(tmp_left
);
/* possibly_true_rl() - like possibly_true() but operates on range lists
 * directly.  A NULL list on either side short-circuits (return value on
 * that path is elided from this view). */
550 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
552 struct data_range
*left_tmp
, *right_tmp
;
554 if (!left_ranges
|| !right_ranges
)
557 FOR_EACH_PTR(left_ranges
, left_tmp
) {
558 FOR_EACH_PTR(right_ranges
, right_tmp
) {
559 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
561 } END_FOR_EACH_PTR(right_tmp
);
562 } END_FOR_EACH_PTR(left_tmp
);
/* possibly_false_rl() - dual of possibly_true_rl() using the
 * false_comparison predicate on every pair of ranges. */
566 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
568 struct data_range
*left_tmp
, *right_tmp
;
570 if (!left_ranges
|| !right_ranges
)
573 FOR_EACH_PTR(left_ranges
, left_tmp
) {
574 FOR_EACH_PTR(right_ranges
, right_tmp
) {
575 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
577 } END_FOR_EACH_PTR(right_tmp
);
578 } END_FOR_EACH_PTR(left_tmp
);
582 /* FIXME: the _rl here stands for right left so really it should be _lr */
/* possibly_true_rl_LR() - order-aware wrapper: when `left` is set the
 * `a` list is on the left side of the comparison, otherwise `b` is. */
583 int possibly_true_rl_LR(int comparison
, struct range_list
*a
, struct range_list
*b
, int left
)
586 return possibly_true_rl(a
, comparison
, b
);
588 return possibly_true_rl(b
, comparison
, a
);
/* possibly_false_rl_LR() - same order-aware dispatch for the false
 * predicate. */
591 int possibly_false_rl_LR(int comparison
, struct range_list
*a
, struct range_list
*b
, int left
)
594 return possibly_false_rl(a
, comparison
, b
);
596 return possibly_false_rl(b
, comparison
, a
);
/* tack_on() - append a range to a list without any sorting or merging
 * (contrast add_range(), which keeps the list normalized). */
599 void tack_on(struct range_list
**list
, struct data_range
*drange
)
601 add_ptr_list(list
, drange
);
/* push_rl() - push a range_list onto a range_list stack. */
604 void push_rl(struct range_list_stack
**rl_stack
, struct range_list
*rl
)
606 add_ptr_list(rl_stack
, rl
);
/* pop_rl() - remove and return the top of the stack. */
609 struct range_list
*pop_rl(struct range_list_stack
**rl_stack
)
611 struct range_list
*rl
;
613 rl
= last_ptr_list((struct ptr_list
*)*rl_stack
);
614 delete_ptr_list_last((struct ptr_list
**)rl_stack
);
/* top_rl() - peek at the top of the stack without popping. */
618 struct range_list
*top_rl(struct range_list_stack
*rl_stack
)
620 struct range_list
*rl
;
622 rl
= last_ptr_list((struct ptr_list
*)rl_stack
);
/* filter_top_rl() - remove one value from the range_list on top of the
 * stack: pop, cut sval out via remove_range(), push the result back. */
626 void filter_top_rl(struct range_list_stack
**rl_stack
, sval_t sval
)
628 struct range_list
*rl
;
630 rl
= pop_rl(rl_stack
);
631 rl
= remove_range(rl
, sval
, sval
);
632 push_rl(rl_stack
, rl
);
/* sval_too_big() - does the value's unsigned representation exceed what
 * fits in `type`?  64-bit types can hold anything, so they are handled
 * first (the return on that branch is elided from this view).  The mask
 * (1ULL << bits) - 1 is safe here only because bits < 64 past that
 * check. */
635 static int sval_too_big(struct symbol
*type
, sval_t sval
)
637 if (type_bits(type
) == 64)
639 if (sval
.uvalue
> ((1ULL << type_bits(type
)) - 1))
/* add_range_t() - add [min,max] to *rl after casting it to `type`,
 * handling the wrap-around cases a cast can introduce: a range whose
 * sign flips under the cast may have to be split into two ranges (one at
 * each end of the type), and a range too wide for the type collapses to
 * the whole type.  (return/early-exit statements between the visible
 * branches are elided from this view.) */
644 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
646 /* If we're just adding a number, cast it and add it */
647 if (sval_cmp(min
, max
) == 0) {
648 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
652 /* If the range is within the type range then add it */
653 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
654 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
659 * If the range we are adding has more bits than the range type then
660 * add the whole range type. Eg:
661 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
662 * This isn't totally the right thing to do. We could be more granular.
664 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
665 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
669 /* Cast negative values to high positive values */
670 if (sval_is_negative(min
) && type_unsigned(type
)) {
671 if (sval_is_positive(max
)) {
672 if (sval_too_high(type
, max
)) {
673 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
/* split: [0,max] plus the high wrapped part added below */
676 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
677 max
= sval_type_max(type
);
679 max
= sval_cast(type
, max
);
681 min
= sval_cast(type
, min
);
682 add_range(rl
, min
, max
);
685 /* Cast high positive numbers to negative */
686 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
687 if (!sval_is_negative(sval_cast(type
, min
))) {
/* split: [min,type_max] plus the negative wrapped part below */
688 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
689 min
= sval_type_min(type
);
691 min
= sval_cast(type
, min
);
693 max
= sval_cast(type
, max
);
694 add_range(rl
, min
, max
);
/* cast_rl() - cast every range in `rl` to `type` via add_range_t(),
 * building a fresh list.  Falls back to the whole range of the type in
 * a case whose guard is elided from this view (presumably when the cast
 * produced an empty list from a non-empty input -- confirm against the
 * full source). */
700 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
702 struct data_range
*tmp
;
703 struct range_list
*ret
= NULL
;
711 FOR_EACH_PTR(rl
, tmp
) {
712 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
713 } END_FOR_EACH_PTR(tmp
);
716 return alloc_whole_rl(type
);
/* rl_invert() - complement of a range_list within its type: emit every
 * gap between consecutive ranges plus the gaps before the first and
 * after the last range.  gap_min tracks the start of the next potential
 * gap; the +1/-1 arithmetic converts inclusive endpoints to gap bounds.
 * (Early return for the abs_max break case is elided from this view.) */
721 struct range_list
*rl_invert(struct range_list
*orig
)
723 struct range_list
*ret
= NULL
;
724 struct data_range
*tmp
;
725 sval_t gap_min
, abs_max
, sval
;
730 gap_min
= sval_type_min(rl_min(orig
).type
);
731 abs_max
= sval_type_max(rl_max(orig
).type
);
733 FOR_EACH_PTR(orig
, tmp
) {
/* a gap exists below this range: emit [gap_min, tmp->min - 1] */
734 if (sval_cmp(tmp
->min
, gap_min
) > 0) {
735 sval
= sval_type_val(tmp
->min
.type
, tmp
->min
.value
- 1);
736 add_range(&ret
, gap_min
, sval
);
/* next gap can begin only above this range's max */
738 gap_min
= sval_type_val(tmp
->max
.type
, tmp
->max
.value
+ 1);
/* reached the type maximum: no trailing gap possible */
739 if (sval_cmp(tmp
->max
, abs_max
) == 0)
741 } END_FOR_EACH_PTR(tmp
);
/* trailing gap up to the type maximum */
743 if (sval_cmp(gap_min
, abs_max
) < 0)
744 add_range(&ret
, gap_min
, abs_max
);
/* rl_filter() - remove every range in `filter` from `rl` by repeated
 * remove_range(); returns the filtered list. */
749 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
751 struct data_range
*tmp
;
753 FOR_EACH_PTR(filter
, tmp
) {
754 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
755 } END_FOR_EACH_PTR(tmp
);
/* rl_intersection() - one ∩ two computed as one minus the complement of
 * two (filtering by the inverted list). */
760 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
764 two
= rl_invert(two
);
765 return rl_filter(one
, two
);
/* free_rl() - release the list structure itself (not the data_range
 * nodes, which belong to their allocator). */
768 void free_rl(struct range_list
**rlist
)
770 __free_ptr_list((struct ptr_list
**)rlist
);
/* free_single_dinfo() - release the value_ranges list owned by one
 * data_info. */
773 static void free_single_dinfo(struct data_info
*dinfo
)
775 free_rl(&dinfo
->value_ranges
);
/* free_dinfos() - walk one allocation blob, freeing the range list of
 * every data_info packed inside it.  (The offset += size step of the
 * loop is elided from this view.) */
778 static void free_dinfos(struct allocation_blob
*blob
)
780 unsigned int size
= sizeof(struct data_info
);
781 unsigned int offset
= 0;
783 while (offset
< blob
->offset
) {
784 free_single_dinfo((struct data_info
*)(blob
->data
+ offset
));
/* free_data_info_allocs() - tear down the data_info allocator: reset its
 * bookkeeping counters, free every blob in the chain (freeing the
 * contained dinfos' range lists first -- that call is on a line elided
 * from this view), and finally clear the data_range allocator too. */
789 void free_data_info_allocs(void)
791 struct allocator_struct
*desc
= &data_info_allocator
;
792 struct allocation_blob
*blob
= desc
->blobs
;
/* reset allocator statistics and freelist before dropping the blobs */
795 desc
->allocations
= 0;
796 desc
->total_bytes
= 0;
797 desc
->useful_bytes
= 0;
798 desc
->freelist
= NULL
;
/* save next before blob_free() invalidates the current node */
800 struct allocation_blob
*next
= blob
->next
;
802 blob_free(blob
, desc
->chunking
);
805 clear_data_range_alloc();