/*
 * sparse/smatch_ranges.c
 *
 * Copyright (C) 2009 Dan Carpenter.
 *
 * Licensed under the Open Software License version 1.1
 */
12 #include "smatch_extra.h"
13 #include "smatch_slist.h"
/*
 * Slab-style allocators for the range-tracking data structures.
 * data_info/data_range come from the regular per-pass allocators;
 * perm_data_range is a separate pool for ranges that must survive
 * free_data_info_allocs() (see alloc_range_perm()/clear_data_range_alloc()).
 * NOTE(review): extraction split these declarations across lines; text
 * kept verbatim.
 */
15 ALLOCATOR(data_info
, "smatch extra data");
16 ALLOCATOR(data_range
, "data range");
17 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
18 "permanent ranges", perm_data_range
);
/*
 * show_rl() - render a range_list as a human-readable string, e.g.
 * "0,4-10,s32max".  Single-value ranges print as one number, wider
 * ranges as "min-max".  The result is allocated with alloc_sname().
 *
 * NOTE(review): the extraction dropped several original lines here
 * (the local buffer declaration/initialization, the separator guard
 * and the else branch) — remaining text kept verbatim.
 * NOTE(review): strncat's size argument bounds the *appended* chars;
 * "254 - strlen(full)" presumably matches a 256-byte buffer — confirm
 * against the missing declaration.
 */
20 char *show_rl(struct range_list
*list
)
22 struct data_range
*tmp
;
28 FOR_EACH_PTR(list
, tmp
) {
/* comma-separate successive ranges */
30 strncat(full
, ",", 254 - strlen(full
));
/* single value: print just the number */
31 if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
32 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
/* otherwise print "min-max" */
35 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
36 strncat(full
, "-", 254 - strlen(full
));
37 strncat(full
, sval_to_str(tmp
->max
), 254 - strlen(full
));
38 } END_FOR_EACH_PTR(tmp
);
39 return alloc_sname(full
);
/*
 * parse_val() - parse one value token from a range string (the format
 * produced by show_rl()/consumed by str_to_rl()).  Recognizes symbolic
 * limits ("max", "u64max", ..., "s16min") and otherwise falls back to
 * strtoll().  *endp presumably receives the position after the token —
 * the advancing of c for the symbolic cases is among the dropped lines;
 * verify against the full source.
 * NOTE(review): extraction dropped the opening brace, local
 * declarations and the final return; text kept verbatim.
 */
42 static sval_t
parse_val(struct symbol
*type
, char *c
, char **endp
)
47 if (!strncmp(start
, "max", 3)) {
48 ret
= sval_type_max(type
);
50 } else if (!strncmp(start
, "u64max", 6)) {
51 ret
= sval_type_val(type
, ULLONG_MAX
);
53 } else if (!strncmp(start
, "s64max", 6)) {
54 ret
= sval_type_val(type
, LLONG_MAX
);
56 } else if (!strncmp(start
, "u32max", 6)) {
57 ret
= sval_type_val(type
, UINT_MAX
);
59 } else if (!strncmp(start
, "s32max", 6)) {
60 ret
= sval_type_val(type
, INT_MAX
);
62 } else if (!strncmp(start
, "u16max", 6)) {
63 ret
= sval_type_val(type
, USHRT_MAX
);
65 } else if (!strncmp(start
, "s16max", 6)) {
66 ret
= sval_type_val(type
, SHRT_MAX
);
68 } else if (!strncmp(start
, "min", 3)) {
69 ret
= sval_type_min(type
);
71 } else if (!strncmp(start
, "s64min", 6)) {
72 ret
= sval_type_val(type
, LLONG_MIN
);
74 } else if (!strncmp(start
, "s32min", 6)) {
75 ret
= sval_type_val(type
, INT_MIN
);
77 } else if (!strncmp(start
, "s16min", 6)) {
78 ret
= sval_type_val(type
, SHRT_MIN
);
/* default: plain decimal number; strtoll advances c past the digits */
81 ret
= sval_type_val(type
, strtoll(start
, &c
, 10));
/*
 * str_to_rl() - parse a textual range description ("0,4-10,s32max")
 * into a range_list of the given type.  Malformed input is reported
 * with sm_msg() debug output.  The result is cast to `type` before
 * being stored in *rl.
 * NOTE(review): extraction dropped the tokenizer loop structure
 * (delimiter checks on ',' and '-', loop exits); text kept verbatim.
 */
87 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
100 min
= parse_val(type
, c
, &c
);
/* single number: add the degenerate range [min, min] */
104 add_range(rl
, min
, min
);
108 add_range(rl
, min
, min
);
113 sm_msg("debug XXX: trouble parsing %s ", value
);
/* "min-max" form: parse the upper bound after the '-' */
119 max
= parse_val(type
, c
, &c
);
120 add_range(rl
, min
, max
);
126 sm_msg("debug YYY: trouble parsing %s %s", value
, c
);
/* normalize everything to the requested type */
132 *rl
= cast_rl(type
, *rl
);
/*
 * is_whole_rl() - true iff the list is a single range spanning the
 * entire value space (type min .. type max).  An empty list is not
 * "whole".
 * NOTE(review): extraction dropped the return statements; text kept
 * verbatim.
 */
135 int is_whole_rl(struct range_list
*rl
)
137 struct data_range
*drange
;
139 if (ptr_list_empty(rl
))
141 drange
= first_ptr_list((struct ptr_list
*)rl
);
142 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
/*
 * rl_min() - smallest value in the list (the ->min of the first range,
 * since ranges are kept sorted by add_range()).  For an empty list the
 * fallback is LLONG_MIN typed as llong_ctype.
 * NOTE(review): extraction dropped the return statements; text kept
 * verbatim.
 */
147 sval_t
rl_min(struct range_list
*rl
)
149 struct data_range
*drange
;
152 ret
.type
= &llong_ctype
;
153 ret
.value
= LLONG_MIN
;
154 if (ptr_list_empty(rl
))
156 drange
= first_ptr_list((struct ptr_list
*)rl
);
/*
 * rl_max() - largest value in the list (the ->max of the last range).
 * Mirror of rl_min(); empty-list fallback is LLONG_MAX / llong_ctype.
 * NOTE(review): extraction dropped the return statements; text kept
 * verbatim.
 */
160 sval_t
rl_max(struct range_list
*rl
)
162 struct data_range
*drange
;
165 ret
.type
= &llong_ctype
;
166 ret
.value
= LLONG_MAX
;
167 if (ptr_list_empty(rl
))
169 drange
= last_ptr_list((struct ptr_list
*)rl
);
/*
 * alloc_range_helper_sval() - allocate a data_range for [min, max] from
 * either the permanent pool (perm != 0) or the regular pool.
 * NOTE(review): extraction dropped the if/else wrapper, the min/max
 * member assignments and the return; text kept verbatim.
 */
173 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
175 struct data_range
*ret
;
178 ret
= __alloc_perm_data_range(0);
180 ret
= __alloc_data_range(0);
186 struct data_range
*alloc_range(sval_t min
, sval_t max
)
188 return alloc_range_helper_sval(min
, max
, 0);
191 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
193 return alloc_range_helper_sval(min
, max
, 1);
196 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
198 struct range_list
*rl
= NULL
;
200 if (sval_cmp(min
, max
) > 0)
201 return alloc_whole_rl(min
.type
);
203 add_range(&rl
, min
, max
);
/*
 * alloc_whole_rl() - range_list covering every value the type can hold
 * (sval_type_min .. sval_type_max).
 * NOTE(review): the body of the guard for a NULL/bit-field type was
 * dropped by the extraction — presumably it substitutes a default type
 * (llong_ctype is used as the fallback elsewhere in this file); confirm
 * against the full source.  Text kept verbatim.
 */
207 struct range_list
*alloc_whole_rl(struct symbol
*type
)
209 if (!type
|| type_positive_bits(type
) < 0)
212 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
/*
 * add_range() - insert [min, max] into a sorted, disjoint range_list,
 * merging with any existing ranges it touches or overlaps and keeping
 * the list sorted.  This is the core invariant-maintaining primitive
 * for the whole file.
 * NOTE(review): the extraction dropped many flow-control lines here
 * (continue/break/return inside the branches, and parts of the merge
 * logic).  The order of the overlap checks matters; the remaining
 * text is kept verbatim and only comments are added.
 */
215 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
217 struct data_range
*tmp
= NULL
;
218 struct data_range
*new = NULL
;
222 * FIXME: This has a problem merging a range_list like: min-0,3-max
223 * with a range like 1-2. You end up with min-2,3-max instead of
226 FOR_EACH_PTR(*list
, tmp
) {
/* Branch taken after the new range has already been placed: */
228 /* Sometimes we overlap with more than one range
229 so we have to delete or modify the next range. */
/* the placed range ends exactly one below tmp: coalesce */
230 if (max
.value
+ 1 == tmp
->min
.value
) {
231 /* join 2 ranges here */
233 DELETE_CURRENT_PTR(tmp
);
237 /* Doesn't overlap with the next one. */
238 if (sval_cmp(max
, tmp
->min
) < 0)
240 /* Partially overlaps with the next one. */
241 if (sval_cmp(max
, tmp
->max
) < 0) {
/* trim the front of tmp so it starts just past the placed range */
242 tmp
->min
.value
= max
.value
+ 1;
245 /* Completely overlaps with the next one. */
246 if (sval_cmp(max
, tmp
->max
) >= 0) {
247 DELETE_CURRENT_PTR(tmp
);
248 /* there could be more ranges to delete */
/* Branches below: the new range has not been placed yet. */
/* adjacent below tmp (guard !sval_is_max avoids max.value+1 overflow) */
252 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
253 /* join 2 ranges into a big range */
254 new = alloc_range(min
, tmp
->max
);
255 REPLACE_CURRENT_PTR(tmp
, new);
258 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
259 new = alloc_range(min
, max
);
260 INSERT_CURRENT(new, tmp
);
263 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
264 if (sval_cmp(max
, tmp
->max
) < 0)
268 new = alloc_range(min
, max
);
269 REPLACE_CURRENT_PTR(tmp
, new);
274 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
276 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
278 new = alloc_range(min
, max
);
279 REPLACE_CURRENT_PTR(tmp
, new);
/* adjacent above tmp (guard !sval_is_min avoids min.value-1 overflow) */
283 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
284 /* join 2 ranges into a big range */
285 new = alloc_range(tmp
->min
, max
);
286 REPLACE_CURRENT_PTR(tmp
, new);
290 /* the new range is entirely above the existing ranges */
291 } END_FOR_EACH_PTR(tmp
);
/* fell off the end: append the new range */
294 new = alloc_range(min
, max
);
295 add_ptr_list(list
, new);
298 struct range_list
*clone_rl(struct range_list
*list
)
300 struct data_range
*tmp
;
301 struct range_list
*ret
= NULL
;
303 FOR_EACH_PTR(list
, tmp
) {
304 add_ptr_list(&ret
, tmp
);
305 } END_FOR_EACH_PTR(tmp
);
309 struct range_list
*clone_rl_permanent(struct range_list
*list
)
311 struct data_range
*tmp
;
312 struct data_range
*new;
313 struct range_list
*ret
= NULL
;
315 FOR_EACH_PTR(list
, tmp
) {
316 new = alloc_range_perm(tmp
->min
, tmp
->max
);
317 add_ptr_list(&ret
, new);
318 } END_FOR_EACH_PTR(tmp
);
322 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
324 struct data_range
*tmp
;
325 struct range_list
*ret
= NULL
;
327 FOR_EACH_PTR(one
, tmp
) {
328 add_range(&ret
, tmp
->min
, tmp
->max
);
329 } END_FOR_EACH_PTR(tmp
);
330 FOR_EACH_PTR(two
, tmp
) {
331 add_range(&ret
, tmp
->min
, tmp
->max
);
332 } END_FOR_EACH_PTR(tmp
);
/*
 * remove_range() - return a new list equal to `list` with every value in
 * [min, max] removed.  Ranges fully outside the cut are copied, ranges
 * fully inside are dropped, and a range straddling the cut is split.
 * NOTE(review): the extraction dropped the continue statements between
 * branches, the adjusted split bounds (the copied pieces presumably
 * exclude min/max themselves — confirm against the full source), and
 * the final return; text kept verbatim.
 */
336 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
338 struct data_range
*tmp
;
339 struct range_list
*ret
= NULL
;
341 FOR_EACH_PTR(list
, tmp
) {
/* entirely below the cut: keep as-is */
342 if (sval_cmp(tmp
->max
, min
) < 0) {
343 add_range(&ret
, tmp
->min
, tmp
->max
);
/* entirely above the cut: keep as-is */
346 if (sval_cmp(tmp
->min
, max
) > 0) {
347 add_range(&ret
, tmp
->min
, tmp
->max
);
/* swallowed whole by the cut: drop */
350 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
/* cut removes the front of tmp: keep the tail */
352 if (sval_cmp(tmp
->min
, min
) >= 0) {
354 add_range(&ret
, max
, tmp
->max
);
/* cut removes the back of tmp: keep the head */
355 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
357 add_range(&ret
, tmp
->min
, min
);
/* cut is strictly inside tmp: keep both halves */
361 add_range(&ret
, tmp
->min
, min
);
362 add_range(&ret
, max
, tmp
->max
);
364 } END_FOR_EACH_PTR(tmp
);
/*
 * ranges_equiv() - 1 iff two data_ranges cover the same [min, max]
 * interval.
 * NOTE(review): extraction dropped the NULL-pointer handling and the
 * return statements; text kept verbatim.
 */
368 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
374 if (sval_cmp(one
->min
, two
->min
) != 0)
376 if (sval_cmp(one
->max
, two
->max
) != 0)
/*
 * rl_equiv() - 1 iff two range_lists contain pairwise-equivalent ranges
 * in the same order.  Walks both lists in lockstep with the ptr_list
 * iterator protocol; both cursors reaching NULL together means equal.
 * NOTE(review): extraction dropped the enclosing loop and the return
 * statements; text kept verbatim.
 */
381 int rl_equiv(struct range_list
*one
, struct range_list
*two
)
383 struct data_range
*one_range
;
384 struct data_range
*two_range
;
389 PREPARE_PTR_LIST(one
, one_range
);
390 PREPARE_PTR_LIST(two
, two_range
);
/* both exhausted simultaneously => equivalent */
392 if (!one_range
&& !two_range
)
394 if (!ranges_equiv(one_range
, two_range
))
396 NEXT_PTR_LIST(one_range
);
397 NEXT_PTR_LIST(two_range
);
399 FINISH_PTR_LIST(two_range
);
400 FINISH_PTR_LIST(one_range
);
/*
 * true_comparison_range() - can `left <cmp> right` possibly be TRUE for
 * some pair of values drawn from the two ranges?  Interval reasoning:
 * e.g. "left < right" is satisfiable iff left->min < right->max.
 * NOTE(review): extraction dropped the signed case labels (the
 * SPECIAL_UNSIGNED_* labels visibly share handlers with their signed
 * '<', '>', "==" counterparts) and all return statements; text kept
 * verbatim.
 */
405 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
407 switch (comparison
) {
409 case SPECIAL_UNSIGNED_LT
:
410 if (sval_cmp(left
->min
, right
->max
) < 0)
413 case SPECIAL_UNSIGNED_LTE
:
415 if (sval_cmp(left
->min
, right
->max
) <= 0)
/* equality: satisfiable iff the intervals overlap */
419 if (sval_cmp(left
->max
, right
->min
) < 0)
421 if (sval_cmp(left
->min
, right
->max
) > 0)
424 case SPECIAL_UNSIGNED_GTE
:
426 if (sval_cmp(left
->max
, right
->min
) >= 0)
430 case SPECIAL_UNSIGNED_GT
:
431 if (sval_cmp(left
->max
, right
->min
) > 0)
434 case SPECIAL_NOTEQUAL
:
/* != is always satisfiable unless both sides are the same constant */
435 if (sval_cmp(left
->min
, left
->max
) != 0)
437 if (sval_cmp(right
->min
, right
->max
) != 0)
439 if (sval_cmp(left
->min
, right
->min
) != 0)
443 sm_msg("unhandled comparison %d\n", comparison
);
/*
 * true_comparison_range_LR() - wrapper that orients the operands: when
 * `left` is non-zero the tracked variable is on the left-hand side of
 * the comparison, otherwise it is on the right.
 */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	return true_comparison_range(val, comparison, var);
}
/*
 * false_comparison_range_sval() - can `left <cmp> right` possibly be
 * FALSE?  Dual of true_comparison_range(): e.g. "left < right" can be
 * false iff left->max >= right->min.
 * NOTE(review): extraction dropped the signed case labels and the
 * return statements; text kept verbatim.
 */
457 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
459 switch (comparison
) {
461 case SPECIAL_UNSIGNED_LT
:
462 if (sval_cmp(left
->max
, right
->min
) >= 0)
465 case SPECIAL_UNSIGNED_LTE
:
467 if (sval_cmp(left
->max
, right
->min
) > 0)
/* equality: falsifiable unless both sides are the same constant */
471 if (sval_cmp(left
->min
, left
->max
) != 0)
473 if (sval_cmp(right
->min
, right
->max
) != 0)
475 if (sval_cmp(left
->min
, right
->min
) != 0)
478 case SPECIAL_UNSIGNED_GTE
:
480 if (sval_cmp(left
->min
, right
->max
) < 0)
484 case SPECIAL_UNSIGNED_GT
:
485 if (sval_cmp(left
->min
, right
->max
) <= 0)
488 case SPECIAL_NOTEQUAL
:
/* != is falsifiable iff the intervals overlap (some equal pair) */
489 if (sval_cmp(left
->max
, right
->min
) < 0)
491 if (sval_cmp(left
->min
, right
->max
) > 0)
495 sm_msg("unhandled comparison %d\n", comparison
);
/*
 * false_comparison_range_LR() - operand-orienting wrapper for
 * false_comparison_range_sval(); `left` non-zero means the tracked
 * variable is the left-hand operand.
 */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	return false_comparison_range_sval(val, comparison, var);
}
/*
 * possibly_true() - can the comparison between two expressions be true
 * for any pair of values in their implied ranges?  Tries every pair of
 * ranges from the two implied range_lists.
 * NOTE(review): extraction dropped the return statements (including the
 * fallbacks when get_implied_rl() fails — presumably "unknown" answers
 * conservatively; confirm); text kept verbatim.
 */
509 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
511 struct range_list
*rl_left
, *rl_right
;
512 struct data_range
*tmp_left
, *tmp_right
;
514 if (!get_implied_rl(left
, &rl_left
))
516 if (!get_implied_rl(right
, &rl_right
))
519 FOR_EACH_PTR(rl_left
, tmp_left
) {
520 FOR_EACH_PTR(rl_right
, tmp_right
) {
/* one satisfiable pair is enough */
521 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
523 } END_FOR_EACH_PTR(tmp_right
);
524 } END_FOR_EACH_PTR(tmp_left
);
/*
 * possibly_false() - dual of possibly_true(): can the comparison be
 * false for any pair of values in the implied ranges?
 * NOTE(review): extraction dropped the return statements; text kept
 * verbatim.
 */
528 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
530 struct range_list
*rl_left
, *rl_right
;
531 struct data_range
*tmp_left
, *tmp_right
;
533 if (!get_implied_rl(left
, &rl_left
))
535 if (!get_implied_rl(right
, &rl_right
))
538 FOR_EACH_PTR(rl_left
, tmp_left
) {
539 FOR_EACH_PTR(rl_right
, tmp_right
) {
/* one falsifiable pair is enough */
540 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
542 } END_FOR_EACH_PTR(tmp_right
);
543 } END_FOR_EACH_PTR(tmp_left
);
/*
 * possibly_true_rl() - same question as possibly_true() but starting
 * from range_lists instead of expressions.  A NULL list means no
 * information, handled up front.
 * NOTE(review): extraction dropped the return statements; text kept
 * verbatim.
 */
547 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
549 struct data_range
*left_tmp
, *right_tmp
;
551 if (!left_ranges
|| !right_ranges
)
554 FOR_EACH_PTR(left_ranges
, left_tmp
) {
555 FOR_EACH_PTR(right_ranges
, right_tmp
) {
556 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
558 } END_FOR_EACH_PTR(right_tmp
);
559 } END_FOR_EACH_PTR(left_tmp
);
/*
 * possibly_false_rl() - dual of possibly_true_rl() for falsifiability.
 * NOTE(review): extraction dropped the return statements; text kept
 * verbatim.
 */
563 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
565 struct data_range
*left_tmp
, *right_tmp
;
567 if (!left_ranges
|| !right_ranges
)
570 FOR_EACH_PTR(left_ranges
, left_tmp
) {
571 FOR_EACH_PTR(right_ranges
, right_tmp
) {
572 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
574 } END_FOR_EACH_PTR(right_tmp
);
575 } END_FOR_EACH_PTR(left_tmp
);
579 /* FIXME: the _rl here stands for right left so really it should be _lr */
/*
 * possibly_true_rl_LR() - operand-orienting wrapper for
 * possibly_true_rl(); `left` non-zero means `a` is the left-hand
 * operand of the comparison.
 */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	return possibly_true_rl(b, comparison, a);
}
/*
 * possibly_false_rl_LR() - operand-orienting wrapper for
 * possibly_false_rl(); mirrors possibly_true_rl_LR().
 */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	return possibly_false_rl(b, comparison, a);
}
/*
 * tack_on() - append a data_range to the end of a range_list WITHOUT
 * the sorting/merging that add_range() does.  Caller is responsible
 * for keeping the list well-formed.
 */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/*
 * push_rl() - push a range_list onto a range_list stack (stacks are
 * ptr_lists; the top is the last element).
 */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/*
 * pop_rl() - remove and return the top range_list of the stack.
 * Behavior on an empty stack follows last_ptr_list()/
 * delete_ptr_list_last() semantics.
 */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}
/*
 * top_rl() - return the top range_list of the stack without popping it.
 */
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}
623 void filter_top_rl(struct range_list_stack
**rl_stack
, sval_t sval
)
625 struct range_list
*rl
;
627 rl
= pop_rl(rl_stack
);
628 rl
= remove_range(rl
, sval
, sval
);
629 push_rl(rl_stack
, rl
);
632 static int sval_too_big(struct symbol
*type
, sval_t sval
)
634 if (type_bits(type
) == 64)
636 if (sval
.uvalue
> ((1ULL << type_bits(type
)) - 1))
/*
 * add_range_t() - add [min, max] to *rl after casting the bounds to
 * `type`, handling the cases where the cast wraps: a signed range cast
 * to unsigned (negative values become high positives) and a high
 * unsigned range cast to signed (values become negative), either of
 * which can split one input range into two output ranges.
 * NOTE(review): extraction dropped the early returns after each
 * handled case and some else branches; the order of the checks
 * matters.  Text kept verbatim.
 */
641 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
643 /* If we're just adding a number, cast it and add it */
644 if (sval_cmp(min
, max
) == 0) {
645 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
649 /* If the range is within the type range then add it */
650 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
651 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
656 * If the range we are adding has more bits than the range type then
657 * add the whole range type. Eg:
658 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
659 * This isn't totally the right thing to do. We could be more granular.
661 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
662 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
666 /* Cast negative values to high positive values */
667 if (sval_is_negative(min
) && type_unsigned(type
)) {
668 if (sval_is_positive(max
)) {
669 if (sval_too_high(type
, max
)) {
670 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
/* range straddles zero: split into [0,max] and [cast(min),typemax] */
673 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
674 max
= sval_type_max(type
);
676 max
= sval_cast(type
, max
);
678 min
= sval_cast(type
, min
);
679 add_range(rl
, min
, max
);
682 /* Cast high positive numbers to negative */
683 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
684 if (!sval_is_negative(sval_cast(type
, min
))) {
/* range straddles the wrap point: split into two pieces */
685 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
686 min
= sval_type_min(type
);
688 min
= sval_cast(type
, min
);
690 max
= sval_cast(type
, max
);
691 add_range(rl
, min
, max
);
/*
 * cast_rl() - cast every range in `rl` to `type`, rebuilding the list
 * via add_range_t().  The visible tail falls back to the whole type
 * range in some case — presumably when the rebuilt list came out empty;
 * the early-out checks before the loop were dropped by the extraction.
 * Text kept verbatim.
 */
697 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
699 struct data_range
*tmp
;
700 struct range_list
*ret
= NULL
;
708 FOR_EACH_PTR(rl
, tmp
) {
709 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
710 } END_FOR_EACH_PTR(tmp
);
713 return alloc_whole_rl(type
);
/*
 * rl_invert() - complement of a range_list: the set of values NOT
 * covered by `orig`, built by emitting the gap before each range and
 * the gap after the last one.  Relies on the list being sorted.
 * NOTE(review): extraction dropped the empty-input guard, a break, and
 * the final return; the ±1 arithmetic on .value is guarded by the
 * surrounding comparisons against abs_max/gap_min.  Text kept verbatim.
 */
718 struct range_list
*rl_invert(struct range_list
*orig
)
720 struct range_list
*ret
= NULL
;
721 struct data_range
*tmp
;
722 sval_t gap_min
, abs_max
, sval
;
/* gap_min tracks the lowest value not yet covered */
727 gap_min
= sval_type_min(rl_min(orig
).type
);
728 abs_max
= sval_type_max(rl_max(orig
).type
);
730 FOR_EACH_PTR(orig
, tmp
) {
/* emit the gap [gap_min, tmp->min - 1] below this range */
731 if (sval_cmp(tmp
->min
, gap_min
) > 0) {
732 sval
= sval_type_val(tmp
->min
.type
, tmp
->min
.value
- 1);
733 add_range(&ret
, gap_min
, sval
);
735 gap_min
= sval_type_val(tmp
->max
.type
, tmp
->max
.value
+ 1);
/* range reaches the type max: no trailing gap (avoids +1 overflow) */
736 if (sval_cmp(tmp
->max
, abs_max
) == 0)
738 } END_FOR_EACH_PTR(tmp
);
/* trailing gap above the last range */
740 if (sval_cmp(gap_min
, abs_max
) < 0)
741 add_range(&ret
, gap_min
, abs_max
);
746 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
748 struct data_range
*tmp
;
750 FOR_EACH_PTR(filter
, tmp
) {
751 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
752 } END_FOR_EACH_PTR(tmp
);
/*
 * rl_intersection() - set intersection computed as one \ complement(two)
 * (invert `two`, then filter it out of `one`).
 * NOTE(review): the extraction dropped a few lines between the
 * signature and the first statement — presumably a guard for a NULL
 * `two` (rl_invert of an empty list would otherwise not represent "no
 * values"); confirm against the full source.  Text kept verbatim.
 */
757 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
761 two
= rl_invert(two
);
762 return rl_filter(one
, two
);
/*
 * free_rl() - free the range_list's ptr_list nodes and NULL the
 * caller's pointer.  The data_range elements themselves stay in their
 * allocator pools.
 */
void free_rl(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}
770 static void free_single_dinfo(struct data_info
*dinfo
)
772 free_rl(&dinfo
->value_ranges
);
/*
 * free_dinfos() - walk one allocation blob, treating its payload as an
 * array of data_info objects, and free each one's range_list.
 * NOTE(review): the loop-advance (offset += size, presumably with
 * alignment — confirm) was dropped by the extraction; text kept
 * verbatim.
 */
775 static void free_dinfos(struct allocation_blob
*blob
)
777 unsigned int size
= sizeof(struct data_info
);
778 unsigned int offset
= 0;
780 while (offset
< blob
->offset
) {
781 free_single_dinfo((struct data_info
*)(blob
->data
+ offset
));
/*
 * free_data_info_allocs() - tear down the data_info allocator: free the
 * range_lists inside every blob, return the blobs to the system, reset
 * the allocator's bookkeeping, and clear the (non-permanent) data_range
 * pool.  Permanent ranges (alloc_range_perm) are untouched.
 * NOTE(review): the extraction dropped the blob-walking loop structure
 * (the call to free_dinfos() per blob and the blob = next advance);
 * text kept verbatim.
 */
786 void free_data_info_allocs(void)
788 struct allocator_struct
*desc
= &data_info_allocator
;
789 struct allocation_blob
*blob
= desc
->blobs
;
/* reset allocator statistics and free list */
792 desc
->allocations
= 0;
793 desc
->total_bytes
= 0;
794 desc
->useful_bytes
= 0;
795 desc
->freelist
= NULL
;
797 struct allocation_blob
*next
= blob
->next
;
799 blob_free(blob
, desc
->chunking
);
802 clear_data_range_alloc();