/*
 * sparse/smatch_ranges.c
 *
 * Copyright (C) 2009 Dan Carpenter.
 *
 * Licensed under the Open Software License version 1.1
 */
12 #include "smatch_extra.h"
13 #include "smatch_slist.h"
15 ALLOCATOR(data_info
, "smatch extra data");
16 ALLOCATOR(data_range
, "data range");
17 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
18 "permanent ranges", perm_data_range
);
20 char *show_ranges(struct range_list
*list
)
22 struct data_range
*tmp
;
28 FOR_EACH_PTR(list
, tmp
) {
30 strncat(full
, ",", 254 - strlen(full
));
31 if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
32 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
35 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
36 strncat(full
, "-", 254 - strlen(full
));
37 strncat(full
, sval_to_str(tmp
->max
), 254 - strlen(full
));
38 } END_FOR_EACH_PTR(tmp
);
39 return alloc_sname(full
);
42 static sval_t
parse_val(struct symbol
*type
, char *c
, char **endp
)
47 if (!strncmp(start
, "max", 3)) {
48 ret
= sval_type_max(type
);
50 } else if (!strncmp(start
, "u64max", 6)) {
51 ret
= sval_type_val(type
, ULLONG_MAX
);
53 } else if (!strncmp(start
, "s64max", 6)) {
54 ret
= sval_type_val(type
, LLONG_MAX
);
56 } else if (!strncmp(start
, "u32max", 6)) {
57 ret
= sval_type_val(type
, UINT_MAX
);
59 } else if (!strncmp(start
, "s32max", 6)) {
60 ret
= sval_type_val(type
, INT_MAX
);
62 } else if (!strncmp(start
, "u16max", 6)) {
63 ret
= sval_type_val(type
, USHRT_MAX
);
65 } else if (!strncmp(start
, "s16max", 6)) {
66 ret
= sval_type_val(type
, SHRT_MAX
);
68 } else if (!strncmp(start
, "min", 3)) {
69 ret
= sval_type_min(type
);
71 } else if (!strncmp(start
, "s64min", 6)) {
72 ret
= sval_type_val(type
, LLONG_MIN
);
74 } else if (!strncmp(start
, "s32min", 6)) {
75 ret
= sval_type_val(type
, INT_MIN
);
77 } else if (!strncmp(start
, "s16min", 6)) {
78 ret
= sval_type_val(type
, SHRT_MIN
);
81 ret
= sval_type_val(type
, strtoll(start
, &c
, 10));
87 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
100 min
= parse_val(type
, c
, &c
);
104 add_range(rl
, min
, min
);
108 add_range(rl
, min
, min
);
113 sm_msg("debug XXX: trouble parsing %s ", value
);
119 max
= parse_val(type
, c
, &c
);
120 add_range(rl
, min
, max
);
126 sm_msg("debug YYY: trouble parsing %s %s", value
, c
);
132 *rl
= cast_rl(type
, *rl
);
135 int is_whole_rl(struct range_list
*rl
)
137 struct data_range
*drange
;
139 if (ptr_list_empty(rl
))
141 drange
= first_ptr_list((struct ptr_list
*)rl
);
142 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
147 sval_t
rl_min(struct range_list
*rl
)
149 struct data_range
*drange
;
152 ret
.type
= &llong_ctype
;
153 ret
.value
= LLONG_MIN
;
154 if (ptr_list_empty(rl
))
156 drange
= first_ptr_list((struct ptr_list
*)rl
);
160 sval_t
rl_max(struct range_list
*rl
)
162 struct data_range
*drange
;
165 ret
.type
= &llong_ctype
;
166 ret
.value
= LLONG_MAX
;
167 if (ptr_list_empty(rl
))
169 drange
= last_ptr_list((struct ptr_list
*)rl
);
173 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
175 struct data_range
*ret
;
178 ret
= __alloc_perm_data_range(0);
180 ret
= __alloc_data_range(0);
186 struct data_range
*alloc_range(sval_t min
, sval_t max
)
188 return alloc_range_helper_sval(min
, max
, 0);
191 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
193 return alloc_range_helper_sval(min
, max
, 1);
196 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
198 struct range_list
*rl
= NULL
;
200 add_range(&rl
, min
, max
);
204 struct range_list
*alloc_whole_rl(struct symbol
*type
)
209 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
212 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
214 struct data_range
*tmp
= NULL
;
215 struct data_range
*new = NULL
;
219 * FIXME: This has a problem merging a range_list like: min-0,3-max
220 * with a range like 1-2. You end up with min-2,3-max instead of
223 FOR_EACH_PTR(*list
, tmp
) {
225 /* Sometimes we overlap with more than one range
226 so we have to delete or modify the next range. */
227 if (max
.value
+ 1 == tmp
->min
.value
) {
228 /* join 2 ranges here */
230 DELETE_CURRENT_PTR(tmp
);
234 /* Doesn't overlap with the next one. */
235 if (sval_cmp(max
, tmp
->min
) < 0)
237 /* Partially overlaps with the next one. */
238 if (sval_cmp(max
, tmp
->max
) < 0) {
239 tmp
->min
.value
= max
.value
+ 1;
242 /* Completely overlaps with the next one. */
243 if (sval_cmp(max
, tmp
->max
) >= 0) {
244 DELETE_CURRENT_PTR(tmp
);
245 /* there could be more ranges to delete */
249 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
250 /* join 2 ranges into a big range */
251 new = alloc_range(min
, tmp
->max
);
252 REPLACE_CURRENT_PTR(tmp
, new);
255 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
256 new = alloc_range(min
, max
);
257 INSERT_CURRENT(new, tmp
);
260 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
261 if (sval_cmp(max
, tmp
->max
) < 0)
265 new = alloc_range(min
, max
);
266 REPLACE_CURRENT_PTR(tmp
, new);
271 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
273 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
275 new = alloc_range(min
, max
);
276 REPLACE_CURRENT_PTR(tmp
, new);
280 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
281 /* join 2 ranges into a big range */
282 new = alloc_range(tmp
->min
, max
);
283 REPLACE_CURRENT_PTR(tmp
, new);
287 /* the new range is entirely above the existing ranges */
288 } END_FOR_EACH_PTR(tmp
);
291 new = alloc_range(min
, max
);
292 add_ptr_list(list
, new);
295 struct range_list
*clone_rl(struct range_list
*list
)
297 struct data_range
*tmp
;
298 struct range_list
*ret
= NULL
;
300 FOR_EACH_PTR(list
, tmp
) {
301 add_ptr_list(&ret
, tmp
);
302 } END_FOR_EACH_PTR(tmp
);
306 struct range_list
*clone_rl_permanent(struct range_list
*list
)
308 struct data_range
*tmp
;
309 struct data_range
*new;
310 struct range_list
*ret
= NULL
;
312 FOR_EACH_PTR(list
, tmp
) {
313 new = alloc_range_perm(tmp
->min
, tmp
->max
);
314 add_ptr_list(&ret
, new);
315 } END_FOR_EACH_PTR(tmp
);
319 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
321 struct data_range
*tmp
;
322 struct range_list
*ret
= NULL
;
324 FOR_EACH_PTR(one
, tmp
) {
325 add_range(&ret
, tmp
->min
, tmp
->max
);
326 } END_FOR_EACH_PTR(tmp
);
327 FOR_EACH_PTR(two
, tmp
) {
328 add_range(&ret
, tmp
->min
, tmp
->max
);
329 } END_FOR_EACH_PTR(tmp
);
333 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
335 struct data_range
*tmp
;
336 struct range_list
*ret
= NULL
;
338 FOR_EACH_PTR(list
, tmp
) {
339 if (sval_cmp(tmp
->max
, min
) < 0) {
340 add_range(&ret
, tmp
->min
, tmp
->max
);
343 if (sval_cmp(tmp
->min
, max
) > 0) {
344 add_range(&ret
, tmp
->min
, tmp
->max
);
347 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
349 if (sval_cmp(tmp
->min
, min
) >= 0) {
351 add_range(&ret
, max
, tmp
->max
);
352 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
354 add_range(&ret
, tmp
->min
, min
);
358 add_range(&ret
, tmp
->min
, min
);
359 add_range(&ret
, max
, tmp
->max
);
361 } END_FOR_EACH_PTR(tmp
);
365 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
371 if (sval_cmp(one
->min
, two
->min
) != 0)
373 if (sval_cmp(one
->max
, two
->max
) != 0)
/*
 * rl_equiv() - 1 if the two range lists contain exactly the same
 * ranges in the same order, 0 otherwise.  Walks both lists in
 * lockstep with the ptr_list cursor macros.
 */
int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;
	int ret = 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		if (!one_range && !two_range)
			break;	/* both exhausted: equal */
		if (!ranges_equiv(one_range, two_range)) {
			ret = 0;
			break;
		}
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);
	return ret;
}
402 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
404 switch (comparison
) {
406 case SPECIAL_UNSIGNED_LT
:
407 if (sval_cmp(left
->min
, right
->max
) < 0)
410 case SPECIAL_UNSIGNED_LTE
:
412 if (sval_cmp(left
->min
, right
->max
) <= 0)
416 if (sval_cmp(left
->max
, right
->min
) < 0)
418 if (sval_cmp(left
->min
, right
->max
) > 0)
421 case SPECIAL_UNSIGNED_GTE
:
423 if (sval_cmp(left
->max
, right
->min
) >= 0)
427 case SPECIAL_UNSIGNED_GT
:
428 if (sval_cmp(left
->max
, right
->min
) > 0)
431 case SPECIAL_NOTEQUAL
:
432 if (sval_cmp(left
->min
, left
->max
) != 0)
434 if (sval_cmp(right
->min
, right
->max
) != 0)
436 if (sval_cmp(left
->min
, right
->min
) != 0)
440 sm_msg("unhandled comparison %d\n", comparison
);
/*
 * true_comparison_range_LR() - like true_comparison_range() but @left
 * says which side of the comparison @var is on.
 */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}
454 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
456 switch (comparison
) {
458 case SPECIAL_UNSIGNED_LT
:
459 if (sval_cmp(left
->max
, right
->min
) >= 0)
462 case SPECIAL_UNSIGNED_LTE
:
464 if (sval_cmp(left
->max
, right
->min
) > 0)
468 if (sval_cmp(left
->min
, left
->max
) != 0)
470 if (sval_cmp(right
->min
, right
->max
) != 0)
472 if (sval_cmp(left
->min
, right
->min
) != 0)
475 case SPECIAL_UNSIGNED_GTE
:
477 if (sval_cmp(left
->min
, right
->max
) < 0)
481 case SPECIAL_UNSIGNED_GT
:
482 if (sval_cmp(left
->min
, right
->max
) <= 0)
485 case SPECIAL_NOTEQUAL
:
486 if (sval_cmp(left
->max
, right
->min
) < 0)
488 if (sval_cmp(left
->min
, right
->max
) > 0)
492 sm_msg("unhandled comparison %d\n", comparison
);
/*
 * false_comparison_range_LR() - like false_comparison_range_sval()
 * but @left says which side of the comparison @var is on.
 */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}
506 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
508 struct range_list
*rl_left
, *rl_right
;
509 struct data_range
*tmp_left
, *tmp_right
;
511 if (!get_implied_rl(left
, &rl_left
))
513 if (!get_implied_rl(right
, &rl_right
))
516 FOR_EACH_PTR(rl_left
, tmp_left
) {
517 FOR_EACH_PTR(rl_right
, tmp_right
) {
518 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
520 } END_FOR_EACH_PTR(tmp_right
);
521 } END_FOR_EACH_PTR(tmp_left
);
525 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
527 struct range_list
*rl_left
, *rl_right
;
528 struct data_range
*tmp_left
, *tmp_right
;
530 if (!get_implied_rl(left
, &rl_left
))
532 if (!get_implied_rl(right
, &rl_right
))
535 FOR_EACH_PTR(rl_left
, tmp_left
) {
536 FOR_EACH_PTR(rl_right
, tmp_right
) {
537 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
539 } END_FOR_EACH_PTR(tmp_right
);
540 } END_FOR_EACH_PTR(tmp_left
);
544 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
546 struct data_range
*left_tmp
, *right_tmp
;
548 if (!left_ranges
|| !right_ranges
)
551 FOR_EACH_PTR(left_ranges
, left_tmp
) {
552 FOR_EACH_PTR(right_ranges
, right_tmp
) {
553 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
555 } END_FOR_EACH_PTR(right_tmp
);
556 } END_FOR_EACH_PTR(left_tmp
);
560 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
562 struct data_range
*left_tmp
, *right_tmp
;
564 if (!left_ranges
|| !right_ranges
)
567 FOR_EACH_PTR(left_ranges
, left_tmp
) {
568 FOR_EACH_PTR(right_ranges
, right_tmp
) {
569 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
571 } END_FOR_EACH_PTR(right_tmp
);
572 } END_FOR_EACH_PTR(left_tmp
);
/* FIXME: the _rl here stands for right left so really it should be _lr */
/*
 * possibly_true_rl_LR() - possibly_true_rl() with @left selecting
 * which side of the comparison @a is on.
 */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}
/*
 * possibly_false_rl_LR() - possibly_false_rl() with @left selecting
 * which side of the comparison @a is on.
 */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}
/* tack_on() - append @drange to *@list without any sorting/merging. */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/* push_rl() - push @rl onto the range-list stack. */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/* pop_rl() - pop and return the top of the range-list stack. */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}
/* top_rl() - return the top of the range-list stack without popping. */
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}
620 void filter_top_rl(struct range_list_stack
**rl_stack
, sval_t sval
)
622 struct range_list
*rl
;
624 rl
= pop_rl(rl_stack
);
625 rl
= remove_range(rl
, sval
, sval
);
626 push_rl(rl_stack
, rl
);
629 static int sval_too_big(struct symbol
*type
, sval_t sval
)
631 if (type_bits(type
) == 64)
633 if (sval
.uvalue
> ((1ULL << type_bits(type
)) - 1))
638 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
640 /* If we're just adding a number, cast it and add it */
641 if (sval_cmp(min
, max
) == 0) {
642 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
646 /* If the range is within the type range then add it */
647 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
648 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
653 * If the range we are adding has more bits than the range type then
654 * add the whole range type. Eg:
655 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
656 * This isn't totally the right thing to do. We could be more granular.
658 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
659 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
663 /* Cast negative values to high positive values */
664 if (sval_is_negative(min
) && type_unsigned(type
)) {
665 if (sval_is_positive(max
)) {
666 if (sval_too_high(type
, max
)) {
667 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
670 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
671 max
= sval_type_max(type
);
673 max
= sval_cast(type
, max
);
675 min
= sval_cast(type
, min
);
676 add_range(rl
, min
, max
);
679 /* Cast high positive numbers to negative */
680 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
681 if (!sval_is_negative(sval_cast(type
, min
))) {
682 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
683 min
= sval_type_min(type
);
685 min
= sval_cast(type
, min
);
687 max
= sval_cast(type
, max
);
688 add_range(rl
, min
, max
);
694 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
696 struct data_range
*tmp
;
697 struct range_list
*ret
= NULL
;
705 FOR_EACH_PTR(rl
, tmp
) {
706 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
707 } END_FOR_EACH_PTR(tmp
);
710 return alloc_whole_rl(type
);
715 struct range_list
*rl_invert(struct range_list
*orig
)
717 struct range_list
*ret
= NULL
;
718 struct data_range
*tmp
;
719 sval_t gap_min
, abs_max
, sval
;
724 gap_min
= sval_type_min(rl_min(orig
).type
);
725 abs_max
= sval_type_max(rl_max(orig
).type
);
727 FOR_EACH_PTR(orig
, tmp
) {
728 if (sval_cmp(tmp
->min
, gap_min
) > 0) {
729 sval
= sval_type_val(tmp
->min
.type
, tmp
->min
.value
- 1);
730 add_range(&ret
, gap_min
, sval
);
732 gap_min
= sval_type_val(tmp
->max
.type
, tmp
->max
.value
+ 1);
733 if (sval_cmp(tmp
->max
, abs_max
) == 0)
735 } END_FOR_EACH_PTR(tmp
);
737 if (sval_cmp(gap_min
, abs_max
) < 0)
738 add_range(&ret
, gap_min
, abs_max
);
743 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
745 struct data_range
*tmp
;
747 FOR_EACH_PTR(filter
, tmp
) {
748 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
749 } END_FOR_EACH_PTR(tmp
);
754 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
758 two
= rl_invert(two
);
759 return rl_filter(one
, two
);
/* free_rl() - free the list nodes of *@rlist and NULL the pointer.
 * (The data_range objects come from allocators and are not freed here.) */
void free_rl(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}
767 static void free_single_dinfo(struct data_info
*dinfo
)
769 if (dinfo
->type
== DATA_RANGE
)
770 free_rl(&dinfo
->value_ranges
);
773 static void free_dinfos(struct allocation_blob
*blob
)
775 unsigned int size
= sizeof(struct data_info
);
776 unsigned int offset
= 0;
778 while (offset
< blob
->offset
) {
779 free_single_dinfo((struct data_info
*)(blob
->data
+ offset
));
784 void free_data_info_allocs(void)
786 struct allocator_struct
*desc
= &data_info_allocator
;
787 struct allocation_blob
*blob
= desc
->blobs
;
790 desc
->allocations
= 0;
791 desc
->total_bytes
= 0;
792 desc
->useful_bytes
= 0;
793 desc
->freelist
= NULL
;
795 struct allocation_blob
*next
= blob
->next
;
797 blob_free(blob
, desc
->chunking
);
800 clear_data_range_alloc();