/*
 * sparse/smatch_ranges.c
 *
 * Copyright (C) 2009 Dan Carpenter.
 *
 * Licensed under the Open Software License version 1.1
 */
12 #include "smatch_extra.h"
13 #include "smatch_slist.h"
/* Allocator pools for the range-tracking structures (sparse's ALLOCATOR
 * machinery).  The "permanent ranges" pool is separate so those entries can
 * outlive the per-run data_info/data_range pools. */
ALLOCATOR(data_info, "smatch extra data");
ALLOCATOR(data_range, "data range");
__DO_ALLOCATOR(struct data_range, sizeof(struct data_range),
	       __alignof__(struct data_range),
	       "permanent ranges", perm_data_range);
/*
 * show_ranges() - render a range_list as a human readable string,
 * e.g. a comma separated sequence of "N" (singleton) or "min-max" spans.
 * Returns an allocated sname string (alloc_sname()).
 *
 * NOTE(review): this extract is missing several original lines -- the
 * opening brace, the declaration/initialisation of the local buffer
 * 'full', the guard that only emits "," between elements, and a
 * 'continue' after the singleton case -- confirm against the full file
 * before editing.
 */
char *show_ranges(struct range_list *list)
	struct data_range *tmp;

	FOR_EACH_PTR(list, tmp) {
		/* separator between ranges (its guard is elided here) */
		strncat(full, ",", 254 - strlen(full));
		/* a singleton range prints as a single value... */
		if (sval_cmp(tmp->min, tmp->max) == 0) {
			strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
		/* ...otherwise it prints as "min-max" */
		strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
		strncat(full, "-", 254 - strlen(full));
		strncat(full, sval_to_str(tmp->max), 254 - strlen(full));
	} END_FOR_EACH_PTR(tmp);
	return alloc_sname(full);
/*
 * parse_val() - parse one value token out of a range-spec string.
 * @type:  controlling type; sizes the symbolic "min"/"max" keywords.
 * @c:     cursor into the string being parsed.
 * @endp:  out parameter; presumably advanced past the consumed token
 *         (the "*endp = c;" line is elided in this extract).
 *
 * Recognises the symbolic bounds "max", "u64max", "s64max", "u32max",
 * "s32max", "u16max", "s16max", "min", "s64min", "s32min" and "s16min";
 * anything else falls through to strtoll() in base 10.
 *
 * NOTE(review): the opening brace, the 'start'/'ret' declarations, the
 * cursor advances (c += 3 / c += 6), and the final return are elided in
 * this extract -- confirm against the full file.
 */
static sval_t parse_val(struct symbol *type, char *c, char **endp)
	if (!strncmp(start, "max", 3)) {
		ret = sval_type_max(type);
	} else if (!strncmp(start, "u64max", 6)) {
		ret = sval_type_val(type, ULLONG_MAX);
	} else if (!strncmp(start, "s64max", 6)) {
		ret = sval_type_val(type, LLONG_MAX);
	} else if (!strncmp(start, "u32max", 6)) {
		ret = sval_type_val(type, UINT_MAX);
	} else if (!strncmp(start, "s32max", 6)) {
		ret = sval_type_val(type, INT_MAX);
	} else if (!strncmp(start, "u16max", 6)) {
		ret = sval_type_val(type, USHRT_MAX);
	} else if (!strncmp(start, "s16max", 6)) {
		ret = sval_type_val(type, SHRT_MAX);
	} else if (!strncmp(start, "min", 3)) {
		ret = sval_type_min(type);
	} else if (!strncmp(start, "s64min", 6)) {
		ret = sval_type_val(type, LLONG_MIN);
	} else if (!strncmp(start, "s32min", 6)) {
		ret = sval_type_val(type, INT_MIN);
	} else if (!strncmp(start, "s16min", 6)) {
		ret = sval_type_val(type, SHRT_MIN);
	/* default branch: a plain base-10 number */
	ret = sval_type_val(type, strtoll(start, &c, 10));
/*
 * parse_value_ranges_type() - parse a textual list of values/ranges in
 * @value into *@rl, with every endpoint sized by @type via parse_val().
 * Emits debug messages when the string cannot be parsed, and finally
 * casts the accumulated list to @type.
 *
 * NOTE(review): the tokenising loop, the separator/terminator checks
 * and the early returns are all elided in this extract -- only the
 * skeleton below is visible.  Confirm against the full file.
 */
void parse_value_ranges_type(struct symbol *type, char *value, struct range_list **rl)
	/* parse the low endpoint of the current token */
	min = parse_val(type, c, &c);
	/* a lone value becomes the singleton range [min, min] */
	add_range(rl, min, min);
	add_range(rl, min, min);
	sm_msg("debug XXX: trouble parsing %s ", value);
	/* "min-max" form: parse the high endpoint too */
	max = parse_val(type, c, &c);
	add_range(rl, min, max);
	sm_msg("debug YYY: trouble parsing %s %s", value, c);
	/* normalise everything to the requested type */
	*rl = cast_rl(type, *rl);
/*
 * is_whole_range_rl() - non-zero when @rl is a single range that spans
 * the entire value space (its first entry runs from the type minimum to
 * the type maximum).
 *
 * NOTE(review): the return statements and closing brace are elided in
 * this extract.
 */
int is_whole_range_rl(struct range_list *rl)
	struct data_range *drange;

	if (ptr_list_empty(rl))
	drange = first_ptr_list((struct ptr_list *)rl);
	if (sval_is_min(drange->min) && sval_is_max(drange->max))
/*
 * rl_min() - the lowest value in @rl (the ->min of the first entry).
 * Defaults to LLONG_MIN with llong_ctype when the list is empty.
 *
 * NOTE(review): the declaration of the local 'ret', the return
 * statements and the line reading drange->min are elided in this
 * extract.
 */
sval_t rl_min(struct range_list *rl)
	struct data_range *drange;

	/* default for an empty list */
	ret.type = &llong_ctype;
	ret.value = LLONG_MIN;
	if (ptr_list_empty(rl))
	drange = first_ptr_list((struct ptr_list *)rl);
/*
 * rl_max() - the highest value in @rl (the ->max of the last entry).
 * Defaults to LLONG_MAX with llong_ctype when the list is empty.
 *
 * NOTE(review): the declaration of the local 'ret', the return
 * statements and the line reading drange->max are elided in this
 * extract.
 */
sval_t rl_max(struct range_list *rl)
	struct data_range *drange;

	/* default for an empty list */
	ret.type = &llong_ctype;
	ret.value = LLONG_MAX;
	if (ptr_list_empty(rl))
	drange = last_ptr_list((struct ptr_list *)rl);
/*
 * alloc_range_helper_sval() - allocate a data_range for [min, max] from
 * either the permanent pool or the ordinary pool.  Presumably @perm
 * selects between the two allocator calls below -- the branch itself is
 * elided in this extract.
 *
 * NOTE(review): the "if (perm)/else", the ->min/->max assignments and
 * the return are elided here -- confirm against the full file.
 */
static struct data_range *alloc_range_helper_sval(sval_t min, sval_t max, int perm)
	struct data_range *ret;

	ret = __alloc_perm_data_range(0);
	ret = __alloc_data_range(0);
186 struct data_range
*alloc_range(sval_t min
, sval_t max
)
188 return alloc_range_helper_sval(min
, max
, 0);
191 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
193 return alloc_range_helper_sval(min
, max
, 1);
196 struct range_list
*alloc_range_list(sval_t min
, sval_t max
)
198 struct range_list
*rl
= NULL
;
200 add_range(&rl
, min
, max
);
/*
 * whole_range_list() - a range_list covering every value representable
 * in @type: [sval_type_min, sval_type_max].
 *
 * NOTE(review): the lines between the signature and the return are
 * elided in this extract (possibly a NULL-@type guard) -- confirm
 * against the full file.
 */
struct range_list *whole_range_list(struct symbol *type)
	return alloc_range_list(sval_type_min(type), sval_type_max(type));
/*
 * add_range() - insert the span [min, max] into the sorted range list
 * *@list, merging with adjacent or overlapping entries.
 *
 * FIXME (original comment, tail elided in this extract): This has a
 * problem merging a range_list like: min-0,3-max with a range like 1-2.
 * You end up with min-2,3-max instead of the fully merged list.
 *
 * NOTE(review): many control-flow lines (returns, 'continue's, else
 * branches and closing braces) are elided in this extract -- the
 * per-case comments below describe only what the visible code shows.
 * Confirm against the full file before editing.
 */
void add_range(struct range_list **list, sval_t min, sval_t max)
	struct data_range *tmp = NULL;
	struct data_range *new = NULL;

	FOR_EACH_PTR(*list, tmp) {
		/* Sometimes we overlap with more than one range
		   so we have to delete or modify the next range. */
		if (max.value + 1 == tmp->min.value) {
			/* join 2 ranges here */
			DELETE_CURRENT_PTR(tmp);
		/* Doesn't overlap with the next one. */
		if (sval_cmp(max, tmp->min) < 0)
		/* Partially overlaps with the next one. */
		if (sval_cmp(max, tmp->max) < 0) {
			/* shrink the existing range so it starts past max */
			tmp->min.value = max.value + 1;
		/* Completely overlaps with the next one. */
		if (sval_cmp(max, tmp->max) >= 0) {
			DELETE_CURRENT_PTR(tmp);
			/* there could be more ranges to delete */
		/* new range is immediately adjacent below this entry */
		if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(min, tmp->max);
			REPLACE_CURRENT_PTR(tmp, new);
		if (sval_cmp(max, tmp->min) < 0) { /* new range entirely below */
			new = alloc_range(min, max);
			INSERT_CURRENT(new, tmp);
		if (sval_cmp(min, tmp->min) < 0) { /* new range partially below */
			if (sval_cmp(max, tmp->max) < 0)
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
		if (sval_cmp(max, tmp->max) <= 0) /* new range already included */
		if (sval_cmp(min, tmp->max) <= 0) { /* new range partially above */
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
		/* new range is immediately adjacent above this entry */
		if (!sval_is_min(min) && min.value - 1 == tmp->max.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(tmp->min, max);
			REPLACE_CURRENT_PTR(tmp, new);
		/* the new range is entirely above the existing ranges */
	} END_FOR_EACH_PTR(tmp);
	/* fell off the end: append as a new entry */
	new = alloc_range(min, max);
	add_ptr_list(list, new);
295 struct range_list
*clone_range_list(struct range_list
*list
)
297 struct data_range
*tmp
;
298 struct range_list
*ret
= NULL
;
300 FOR_EACH_PTR(list
, tmp
) {
301 add_ptr_list(&ret
, tmp
);
302 } END_FOR_EACH_PTR(tmp
);
306 struct range_list
*clone_permanent(struct range_list
*list
)
308 struct data_range
*tmp
;
309 struct data_range
*new;
310 struct range_list
*ret
= NULL
;
312 FOR_EACH_PTR(list
, tmp
) {
313 new = alloc_range_perm(tmp
->min
, tmp
->max
);
314 add_ptr_list(&ret
, new);
315 } END_FOR_EACH_PTR(tmp
);
319 struct range_list
*range_list_union(struct range_list
*one
, struct range_list
*two
)
321 struct data_range
*tmp
;
322 struct range_list
*ret
= NULL
;
324 FOR_EACH_PTR(one
, tmp
) {
325 add_range(&ret
, tmp
->min
, tmp
->max
);
326 } END_FOR_EACH_PTR(tmp
);
327 FOR_EACH_PTR(two
, tmp
) {
328 add_range(&ret
, tmp
->min
, tmp
->max
);
329 } END_FOR_EACH_PTR(tmp
);
/*
 * remove_range() - a new list equal to @list with the span [min, max]
 * cut out.  Entries entirely outside the cut are copied; entries fully
 * inside it are dropped; entries straddling an edge are trimmed; an
 * entry that contains the whole cut is split in two.
 *
 * NOTE(review): the 'continue' statements, the +-1 endpoint
 * adjustments on the trimmed edges, the else arm and the final return
 * are elided in this extract -- confirm against the full file.
 */
struct range_list *remove_range(struct range_list *list, sval_t min, sval_t max)
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		/* entirely below the removed span: keep as-is */
		if (sval_cmp(tmp->max, min) < 0) {
			add_range(&ret, tmp->min, tmp->max);
		/* entirely above the removed span: keep as-is */
		if (sval_cmp(tmp->min, max) > 0) {
			add_range(&ret, tmp->min, tmp->max);
		/* fully inside the removed span: drop */
		if (sval_cmp(tmp->min, min) >= 0 && sval_cmp(tmp->max, max) <= 0)
		/* overlaps the low edge: keep the upper part */
		if (sval_cmp(tmp->min, min) >= 0) {
			add_range(&ret, max, tmp->max);
		/* overlaps the high edge: keep the lower part */
		} else if (sval_cmp(tmp->max, max) <= 0) {
			add_range(&ret, tmp->min, min);
		/* the cut is strictly inside this entry: keep both sides */
		add_range(&ret, tmp->min, min);
		add_range(&ret, max, tmp->max);
	} END_FOR_EACH_PTR(tmp);
/*
 * ranges_equiv() - do @one and @two have identical endpoints?
 *
 * NOTE(review): NULL handling (original lines before the first
 * comparison) and the return statements are elided in this extract.
 */
int ranges_equiv(struct data_range *one, struct data_range *two)
	if (sval_cmp(one->min, two->min) != 0)
	if (sval_cmp(one->max, two->max) != 0)
/*
 * range_lists_equiv() - walk @one and @two in lock step and report
 * whether they consist of pairwise-equivalent ranges.
 *
 * NOTE(review): the loop construct wrapping the body and the return
 * statements are elided in this extract.
 */
int range_lists_equiv(struct range_list *one, struct range_list *two)
	struct data_range *one_range;
	struct data_range *two_range;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	/* both exhausted at the same time => equivalent */
	if (!one_range && !two_range)
	/* any mismatched pair => not equivalent */
	if (!ranges_equiv(one_range, two_range))
	NEXT_PTR_LIST(one_range);
	NEXT_PTR_LIST(two_range);
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);
/*
 * true_comparison_range() - can "left <comparison> right" hold for at
 * least one pair of values drawn from the two ranges?
 *
 * NOTE(review): several case labels (the signed '<', '<=', '==' etc.
 * variants, judging by the gaps), the return statements and the end of
 * the switch are elided in this extract -- confirm against the full
 * file before editing.
 */
int true_comparison_range(struct data_range *left, int comparison, struct data_range *right)
	switch (comparison) {
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->min, right->max) < 0)
	case SPECIAL_UNSIGNED_LTE:
		if (sval_cmp(left->min, right->max) <= 0)
		/* the two checks below appear to belong to an equality
		 * case whose label is elided */
		if (sval_cmp(left->max, right->min) < 0)
		if (sval_cmp(left->min, right->max) > 0)
	case SPECIAL_UNSIGNED_GTE:
		if (sval_cmp(left->max, right->min) >= 0)
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->max, right->min) > 0)
	case SPECIAL_NOTEQUAL:
		/* != can only be impossible when both sides are the same
		 * singleton value */
		if (sval_cmp(left->min, left->max) != 0)
		if (sval_cmp(right->min, right->max) != 0)
		if (sval_cmp(left->min, right->min) != 0)
	sm_msg("unhandled comparison %d\n", comparison);
/*
 * true_comparison_range_lr() - orient the operands before calling
 * true_comparison_range().  Presumably the @left flag chooses between
 * the two calls below ("var is the left-hand side" vs swapped) -- the
 * branch condition itself is elided in this extract.
 */
int true_comparison_range_lr(int comparison, struct data_range *var, struct data_range *val, int left)
	return true_comparison_range(var, comparison, val);
	return true_comparison_range(val, comparison, var);
/*
 * false_comparison_range_sval() - can "left <comparison> right" be
 * false for at least one pair of values drawn from the two ranges?
 *
 * NOTE(review): as with the true_ variant, several case labels, the
 * return statements and the end of the switch are elided in this
 * extract -- confirm against the full file before editing.
 */
static int false_comparison_range_sval(struct data_range *left, int comparison, struct data_range *right)
	switch (comparison) {
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->max, right->min) >= 0)
	case SPECIAL_UNSIGNED_LTE:
		if (sval_cmp(left->max, right->min) > 0)
		/* the three checks below appear to belong to an equality
		 * case whose label is elided: == can only be always-true
		 * when both sides are the same singleton */
		if (sval_cmp(left->min, left->max) != 0)
		if (sval_cmp(right->min, right->max) != 0)
		if (sval_cmp(left->min, right->min) != 0)
	case SPECIAL_UNSIGNED_GTE:
		if (sval_cmp(left->min, right->max) < 0)
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->min, right->max) <= 0)
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(left->max, right->min) < 0)
		if (sval_cmp(left->min, right->max) > 0)
	sm_msg("unhandled comparison %d\n", comparison);
/*
 * false_comparison_range_lr() - orient the operands before calling
 * false_comparison_range_sval().  Presumably the @left flag chooses
 * between the two calls below, mirroring true_comparison_range_lr() --
 * the branch condition itself is elided in this extract.
 */
int false_comparison_range_lr(int comparison, struct data_range *var, struct data_range *val, int left)
	return false_comparison_range_sval(var, comparison, val);
	return false_comparison_range_sval(val, comparison, var);
/*
 * possibly_true() - can "left <comparison> right" be true for any pair
 * of values implied for the two expressions?
 *
 * NOTE(review): the returns following the get_implied_range_list()
 * guards, the hit inside the loop and the final return are elided in
 * this extract.
 */
int possibly_true(struct expression *left, int comparison, struct expression *right)
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;

	/* bail out when either side has no implied range list */
	if (!get_implied_range_list(left, &rl_left))
	if (!get_implied_range_list(right, &rl_right))

	/* true as soon as any pair of ranges can compare true */
	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (true_comparison_range(tmp_left, comparison, tmp_right))
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
/*
 * possibly_false() - can "left <comparison> right" be false for any
 * pair of values implied for the two expressions?
 *
 * NOTE(review): the returns following the get_implied_range_list()
 * guards, the hit inside the loop and the final return are elided in
 * this extract.
 */
int possibly_false(struct expression *left, int comparison, struct expression *right)
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;

	/* bail out when either side has no implied range list */
	if (!get_implied_range_list(left, &rl_left))
	if (!get_implied_range_list(right, &rl_right))

	/* false as soon as any pair of ranges can compare false */
	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (false_comparison_range_sval(tmp_left, comparison, tmp_right))
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
/*
 * possibly_true_range_lists() - same test as possibly_true(), but on
 * already-computed range lists.
 *
 * NOTE(review): the return after the NULL guard, the hit inside the
 * loop and the final return are elided in this extract.
 */
int possibly_true_range_lists(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
	struct data_range *left_tmp, *right_tmp;

	/* guard against missing lists */
	if (!left_ranges || !right_ranges)

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (true_comparison_range(left_tmp, comparison, right_tmp))
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
/*
 * possibly_false_range_lists() - same test as possibly_false(), but on
 * already-computed range lists.
 *
 * NOTE(review): the return after the NULL guard, the hit inside the
 * loop and the final return are elided in this extract.
 */
int possibly_false_range_lists(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
	struct data_range *left_tmp, *right_tmp;

	/* guard against missing lists */
	if (!left_ranges || !right_ranges)

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (false_comparison_range_sval(left_tmp, comparison, right_tmp))
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
/* FIXME: the _rl here stands for right left so really it should be _lr */
/*
 * possibly_true_range_lists_lr() - orient @a/@b before calling
 * possibly_true_range_lists().  Presumably the @left flag chooses
 * between the two calls below -- the branch condition is elided in
 * this extract.
 */
int possibly_true_range_lists_lr(int comparison, struct range_list *a, struct range_list *b, int left)
	return possibly_true_range_lists(a, comparison, b);
	return possibly_true_range_lists(b, comparison, a);
/*
 * possibly_false_range_lists_lr() - orient @a/@b before calling
 * possibly_false_range_lists().  Presumably the @left flag chooses
 * between the two calls below -- the branch condition is elided in
 * this extract.
 */
int possibly_false_range_lists_lr(int comparison, struct range_list *a, struct range_list *b, int left)
	return possibly_false_range_lists(a, comparison, b);
	return possibly_false_range_lists(b, comparison, a);
/*
 * tack_on() - append @drange to the end of @list directly, with no
 * sorting or merging (contrast with add_range()).
 */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/*
 * push_range_list() - push @rl onto the top of the range_list stack.
 */
void push_range_list(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/*
 * pop_range_list() - remove the range_list on top of the stack and
 * return it.
 */
struct range_list *pop_range_list(struct range_list_stack **rl_stack)
{
	struct range_list *top;

	top = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return top;
}
/*
 * top_range_list() - peek at the range_list on top of the stack
 * without popping it.
 */
struct range_list *top_range_list(struct range_list_stack *rl_stack)
{
	return last_ptr_list((struct ptr_list *)rl_stack);
}
617 void filter_top_range_list(struct range_list_stack
**rl_stack
, sval_t sval
)
619 struct range_list
*rl
;
621 rl
= pop_range_list(rl_stack
);
622 rl
= remove_range(rl
, sval
, sval
);
623 push_range_list(rl_stack
, rl
);
/*
 * cast_rl() - re-express @rl in @type: singletons are cast directly,
 * each range is cast entry by entry (skipping entries that fall
 * entirely outside the new type), and the result's first/last entries
 * may be widened to the type's min/max.  Falls back to the whole range
 * of @type when the result would otherwise be empty.
 *
 * NOTE(review): the set_min/set_max bookkeeping, the local min/max
 * per-entry setup, several early returns and the guards around the
 * final first/last widening are elided in this extract -- confirm
 * against the full file before editing.
 */
struct range_list *cast_rl(struct symbol *type, struct range_list *rl)
	struct data_range *tmp;
	struct data_range *new;
	struct range_list *ret = NULL;
	int set_min, set_max;

	/* nothing to convert: just copy */
	return clone_range_list(rl);

	/* a singleton list casts to a singleton of the new type */
	if (sval_cmp(rl_min(rl), rl_max(rl)) == 0) {
		sval_t sval = sval_cast(type, rl_min(rl));
		return alloc_range_list(sval, sval);

	/* guards deciding whether the extremes must be widened */
	if (type_unsigned(type) && sval_cmp_val(rl_min(rl), 0) < 0)
	if (type_signed(type) && sval_cmp(rl_max(rl), sval_type_max(type)) > 0)
	if (sval_positive_bits(rl_max(rl)) > type_positive_bits(type) &&
	    sval_cmp(rl_max(rl), sval_type_max(type)) > 0)

	FOR_EACH_PTR(rl, tmp) {
		/* entries entirely outside the new type are skipped */
		if (sval_cmp_t(type, max, sval_type_min(type)) < 0)
		if (sval_cmp_t(type, min, sval_type_max(type)) > 0)
		if (sval_cmp_val(min, 0) < 0 && type_unsigned(type))
		new = alloc_range(sval_cast(type, min), sval_cast(type, max));
		add_ptr_list(&ret, new);
	} END_FOR_EACH_PTR(tmp);

	/* nothing survived the cast: assume the full range */
	return whole_range_list(type);

	/* widen the ends when the guards above fired */
	tmp = first_ptr_list((struct ptr_list *)ret);
	tmp->min = sval_type_min(type);
	tmp = last_ptr_list((struct ptr_list *)ret);
	tmp->max = sval_type_max(type);
/*
 * free_range_list() - release the ptr_list backing *@rlist.
 */
void free_range_list(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}
692 static void free_single_dinfo(struct data_info
*dinfo
)
694 if (dinfo
->type
== DATA_RANGE
)
695 free_range_list(&dinfo
->value_ranges
);
/*
 * free_dinfos() - walk one allocation blob and free the range_list of
 * every data_info stored in it.
 *
 * NOTE(review): the loop increment (presumably "offset += size;") and
 * the closing braces are elided in this extract -- confirm against the
 * full file.
 */
static void free_dinfos(struct allocation_blob *blob)
	unsigned int size = sizeof(struct data_info);
	unsigned int offset = 0;

	while (offset < blob->offset) {
		free_single_dinfo((struct data_info *)(blob->data + offset));
/*
 * free_data_info_allocs() - reset the data_info allocator: zero its
 * bookkeeping, release every allocation blob in the chain, then clear
 * the data_range allocator as well.
 *
 * NOTE(review): the "while (blob)" loop structure, a likely
 * free_dinfos(blob) call inside it, the "blob = next;" advance and the
 * reset of desc->blobs are elided in this extract -- confirm against
 * the full file.
 */
void free_data_info_allocs(void)
	struct allocator_struct *desc = &data_info_allocator;
	struct allocation_blob *blob = desc->blobs;

	/* reset the allocator bookkeeping */
	desc->allocations = 0;
	desc->total_bytes = 0;
	desc->useful_bytes = 0;
	desc->freelist = NULL;
		/* remember the next blob before freeing this one */
		struct allocation_blob *next = blob->next;
		blob_free(blob, desc->chunking);
	clear_data_range_alloc();