estate: rename is_whole_range() to estate_is_whole()
[smatch.git] / smatch_ranges.c
blob8149541689a267a54b5e9b091fc9046993941f92
1 /*
2 * sparse/smatch_ranges.c
4 * Copyright (C) 2009 Dan Carpenter.
6 * Licensed under the Open Software License version 1.1
8 */
10 #include "parse.h"
11 #include "smatch.h"
12 #include "smatch_extra.h"
13 #include "smatch_slist.h"
15 ALLOCATOR(data_info, "smatch extra data");
16 ALLOCATOR(data_range, "data range");
17 __DO_ALLOCATOR(struct data_range, sizeof(struct data_range), __alignof__(struct data_range),
18 "permanent ranges", perm_data_range);
20 char *show_ranges(struct range_list *list)
22 struct data_range *tmp;
23 char full[256];
24 int i = 0;
26 full[0] = '\0';
27 full[255] = '\0';
28 FOR_EACH_PTR(list, tmp) {
29 if (i++)
30 strncat(full, ",", 254 - strlen(full));
31 if (sval_cmp(tmp->min, tmp->max) == 0) {
32 strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
33 continue;
35 strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
36 strncat(full, "-", 254 - strlen(full));
37 strncat(full, sval_to_str(tmp->max), 254 - strlen(full));
38 } END_FOR_EACH_PTR(tmp);
39 return alloc_sname(full);
42 static sval_t parse_val(struct symbol *type, char *c, char **endp)
44 char *start = c;
45 sval_t ret;
47 if (!strncmp(start, "max", 3)) {
48 ret = sval_type_max(type);
49 c += 3;
50 } else if (!strncmp(start, "u64max", 6)) {
51 ret = sval_type_val(type, ULLONG_MAX);
52 c += 6;
53 } else if (!strncmp(start, "s64max", 6)) {
54 ret = sval_type_val(type, LLONG_MAX);
55 c += 6;
56 } else if (!strncmp(start, "u32max", 6)) {
57 ret = sval_type_val(type, UINT_MAX);
58 c += 6;
59 } else if (!strncmp(start, "s32max", 6)) {
60 ret = sval_type_val(type, INT_MAX);
61 c += 6;
62 } else if (!strncmp(start, "u16max", 6)) {
63 ret = sval_type_val(type, USHRT_MAX);
64 c += 6;
65 } else if (!strncmp(start, "s16max", 6)) {
66 ret = sval_type_val(type, SHRT_MAX);
67 c += 6;
68 } else if (!strncmp(start, "min", 3)) {
69 ret = sval_type_min(type);
70 c += 3;
71 } else if (!strncmp(start, "s64min", 6)) {
72 ret = sval_type_val(type, LLONG_MIN);
73 c += 6;
74 } else if (!strncmp(start, "s32min", 6)) {
75 ret = sval_type_val(type, INT_MIN);
76 c += 6;
77 } else if (!strncmp(start, "s16min", 6)) {
78 ret = sval_type_val(type, SHRT_MIN);
79 c += 6;
80 } else {
81 ret = sval_type_val(type, strtoll(start, &c, 10));
83 *endp = c;
84 return ret;
87 void str_to_rl(struct symbol *type, char *value, struct range_list **rl)
89 sval_t min, max;
90 char *c;
92 if (!type)
93 type = &llong_ctype;
94 *rl = NULL;
96 c = value;
97 while (*c) {
98 if (*c == '(')
99 c++;
100 min = parse_val(type, c, &c);
101 if (*c == ')')
102 c++;
103 if (!*c) {
104 add_range(rl, min, min);
105 break;
107 if (*c == ',') {
108 add_range(rl, min, min);
109 c++;
110 continue;
112 if (*c != '-') {
113 sm_msg("debug XXX: trouble parsing %s ", value);
114 break;
116 c++;
117 if (*c == '(')
118 c++;
119 max = parse_val(type, c, &c);
120 add_range(rl, min, max);
121 if (*c == ')')
122 c++;
123 if (!*c)
124 break;
125 if (*c != ',') {
126 sm_msg("debug YYY: trouble parsing %s %s", value, c);
127 break;
129 c++;
132 *rl = cast_rl(type, *rl);
135 int is_whole_rl(struct range_list *rl)
137 struct data_range *drange;
139 if (ptr_list_empty(rl))
140 return 1;
141 drange = first_ptr_list((struct ptr_list *)rl);
142 if (sval_is_min(drange->min) && sval_is_max(drange->max))
143 return 1;
144 return 0;
147 sval_t rl_min(struct range_list *rl)
149 struct data_range *drange;
150 sval_t ret;
152 ret.type = &llong_ctype;
153 ret.value = LLONG_MIN;
154 if (ptr_list_empty(rl))
155 return ret;
156 drange = first_ptr_list((struct ptr_list *)rl);
157 return drange->min;
160 sval_t rl_max(struct range_list *rl)
162 struct data_range *drange;
163 sval_t ret;
165 ret.type = &llong_ctype;
166 ret.value = LLONG_MAX;
167 if (ptr_list_empty(rl))
168 return ret;
169 drange = last_ptr_list((struct ptr_list *)rl);
170 return drange->max;
173 static struct data_range *alloc_range_helper_sval(sval_t min, sval_t max, int perm)
175 struct data_range *ret;
177 if (perm)
178 ret = __alloc_perm_data_range(0);
179 else
180 ret = __alloc_data_range(0);
181 ret->min = min;
182 ret->max = max;
183 return ret;
186 struct data_range *alloc_range(sval_t min, sval_t max)
188 return alloc_range_helper_sval(min, max, 0);
191 struct data_range *alloc_range_perm(sval_t min, sval_t max)
193 return alloc_range_helper_sval(min, max, 1);
196 struct range_list *alloc_rl(sval_t min, sval_t max)
198 struct range_list *rl = NULL;
200 add_range(&rl, min, max);
201 return rl;
204 struct range_list *alloc_whole_rl(struct symbol *type)
206 if (!type)
207 type = &llong_ctype;
209 return alloc_rl(sval_type_min(type), sval_type_max(type));
212 void add_range(struct range_list **list, sval_t min, sval_t max)
214 struct data_range *tmp = NULL;
215 struct data_range *new = NULL;
216 int check_next = 0;
219 * FIXME: This has a problem merging a range_list like: min-0,3-max
220 * with a range like 1-2. You end up with min-2,3-max instead of
221 * just min-max.
223 FOR_EACH_PTR(*list, tmp) {
224 if (check_next) {
225 /* Sometimes we overlap with more than one range
226 so we have to delete or modify the next range. */
227 if (max.value + 1 == tmp->min.value) {
228 /* join 2 ranges here */
229 new->max = tmp->max;
230 DELETE_CURRENT_PTR(tmp);
231 return;
234 /* Doesn't overlap with the next one. */
235 if (sval_cmp(max, tmp->min) < 0)
236 return;
237 /* Partially overlaps with the next one. */
238 if (sval_cmp(max, tmp->max) < 0) {
239 tmp->min.value = max.value + 1;
240 return;
242 /* Completely overlaps with the next one. */
243 if (sval_cmp(max, tmp->max) >= 0) {
244 DELETE_CURRENT_PTR(tmp);
245 /* there could be more ranges to delete */
246 continue;
249 if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
250 /* join 2 ranges into a big range */
251 new = alloc_range(min, tmp->max);
252 REPLACE_CURRENT_PTR(tmp, new);
253 return;
255 if (sval_cmp(max, tmp->min) < 0) { /* new range entirely below */
256 new = alloc_range(min, max);
257 INSERT_CURRENT(new, tmp);
258 return;
260 if (sval_cmp(min, tmp->min) < 0) { /* new range partially below */
261 if (sval_cmp(max, tmp->max) < 0)
262 max = tmp->max;
263 else
264 check_next = 1;
265 new = alloc_range(min, max);
266 REPLACE_CURRENT_PTR(tmp, new);
267 if (!check_next)
268 return;
269 continue;
271 if (sval_cmp(max, tmp->max) <= 0) /* new range already included */
272 return;
273 if (sval_cmp(min, tmp->max) <= 0) { /* new range partially above */
274 min = tmp->min;
275 new = alloc_range(min, max);
276 REPLACE_CURRENT_PTR(tmp, new);
277 check_next = 1;
278 continue;
280 if (!sval_is_min(min) && min.value - 1 == tmp->max.value) {
281 /* join 2 ranges into a big range */
282 new = alloc_range(tmp->min, max);
283 REPLACE_CURRENT_PTR(tmp, new);
284 check_next = 1;
285 continue;
287 /* the new range is entirely above the existing ranges */
288 } END_FOR_EACH_PTR(tmp);
289 if (check_next)
290 return;
291 new = alloc_range(min, max);
292 add_ptr_list(list, new);
295 struct range_list *clone_rl(struct range_list *list)
297 struct data_range *tmp;
298 struct range_list *ret = NULL;
300 FOR_EACH_PTR(list, tmp) {
301 add_ptr_list(&ret, tmp);
302 } END_FOR_EACH_PTR(tmp);
303 return ret;
306 struct range_list *clone_rl_permanent(struct range_list *list)
308 struct data_range *tmp;
309 struct data_range *new;
310 struct range_list *ret = NULL;
312 FOR_EACH_PTR(list, tmp) {
313 new = alloc_range_perm(tmp->min, tmp->max);
314 add_ptr_list(&ret, new);
315 } END_FOR_EACH_PTR(tmp);
316 return ret;
319 struct range_list *rl_union(struct range_list *one, struct range_list *two)
321 struct data_range *tmp;
322 struct range_list *ret = NULL;
324 FOR_EACH_PTR(one, tmp) {
325 add_range(&ret, tmp->min, tmp->max);
326 } END_FOR_EACH_PTR(tmp);
327 FOR_EACH_PTR(two, tmp) {
328 add_range(&ret, tmp->min, tmp->max);
329 } END_FOR_EACH_PTR(tmp);
330 return ret;
333 struct range_list *remove_range(struct range_list *list, sval_t min, sval_t max)
335 struct data_range *tmp;
336 struct range_list *ret = NULL;
338 FOR_EACH_PTR(list, tmp) {
339 if (sval_cmp(tmp->max, min) < 0) {
340 add_range(&ret, tmp->min, tmp->max);
341 continue;
343 if (sval_cmp(tmp->min, max) > 0) {
344 add_range(&ret, tmp->min, tmp->max);
345 continue;
347 if (sval_cmp(tmp->min, min) >= 0 && sval_cmp(tmp->max, max) <= 0)
348 continue;
349 if (sval_cmp(tmp->min, min) >= 0) {
350 max.value++;
351 add_range(&ret, max, tmp->max);
352 } else if (sval_cmp(tmp->max, max) <= 0) {
353 min.value--;
354 add_range(&ret, tmp->min, min);
355 } else {
356 min.value--;
357 max.value++;
358 add_range(&ret, tmp->min, min);
359 add_range(&ret, max, tmp->max);
361 } END_FOR_EACH_PTR(tmp);
362 return ret;
365 int ranges_equiv(struct data_range *one, struct data_range *two)
367 if (!one && !two)
368 return 1;
369 if (!one || !two)
370 return 0;
371 if (sval_cmp(one->min, two->min) != 0)
372 return 0;
373 if (sval_cmp(one->max, two->max) != 0)
374 return 0;
375 return 1;
/* Compare two range lists element by element. */
int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *a;
	struct data_range *b;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, a);
	PREPARE_PTR_LIST(two, b);
	for (;;) {
		/* both exhausted at the same time => equal */
		if (!a && !b)
			return 1;
		if (!ranges_equiv(a, b))
			return 0;
		NEXT_PTR_LIST(a);
		NEXT_PTR_LIST(b);
	}
	FINISH_PTR_LIST(b);
	FINISH_PTR_LIST(a);

	return 1;
}
402 int true_comparison_range(struct data_range *left, int comparison, struct data_range *right)
404 switch (comparison) {
405 case '<':
406 case SPECIAL_UNSIGNED_LT:
407 if (sval_cmp(left->min, right->max) < 0)
408 return 1;
409 return 0;
410 case SPECIAL_UNSIGNED_LTE:
411 case SPECIAL_LTE:
412 if (sval_cmp(left->min, right->max) <= 0)
413 return 1;
414 return 0;
415 case SPECIAL_EQUAL:
416 if (sval_cmp(left->max, right->min) < 0)
417 return 0;
418 if (sval_cmp(left->min, right->max) > 0)
419 return 0;
420 return 1;
421 case SPECIAL_UNSIGNED_GTE:
422 case SPECIAL_GTE:
423 if (sval_cmp(left->max, right->min) >= 0)
424 return 1;
425 return 0;
426 case '>':
427 case SPECIAL_UNSIGNED_GT:
428 if (sval_cmp(left->max, right->min) > 0)
429 return 1;
430 return 0;
431 case SPECIAL_NOTEQUAL:
432 if (sval_cmp(left->min, left->max) != 0)
433 return 1;
434 if (sval_cmp(right->min, right->max) != 0)
435 return 1;
436 if (sval_cmp(left->min, right->min) != 0)
437 return 1;
438 return 0;
439 default:
440 sm_msg("unhandled comparison %d\n", comparison);
441 return 0;
443 return 0;
/* Same as true_comparison_range(); @left says which side @var is on. */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	return true_comparison_range(val, comparison, var);
}
454 static int false_comparison_range_sval(struct data_range *left, int comparison, struct data_range *right)
456 switch (comparison) {
457 case '<':
458 case SPECIAL_UNSIGNED_LT:
459 if (sval_cmp(left->max, right->min) >= 0)
460 return 1;
461 return 0;
462 case SPECIAL_UNSIGNED_LTE:
463 case SPECIAL_LTE:
464 if (sval_cmp(left->max, right->min) > 0)
465 return 1;
466 return 0;
467 case SPECIAL_EQUAL:
468 if (sval_cmp(left->min, left->max) != 0)
469 return 1;
470 if (sval_cmp(right->min, right->max) != 0)
471 return 1;
472 if (sval_cmp(left->min, right->min) != 0)
473 return 1;
474 return 0;
475 case SPECIAL_UNSIGNED_GTE:
476 case SPECIAL_GTE:
477 if (sval_cmp(left->min, right->max) < 0)
478 return 1;
479 return 0;
480 case '>':
481 case SPECIAL_UNSIGNED_GT:
482 if (sval_cmp(left->min, right->max) <= 0)
483 return 1;
484 return 0;
485 case SPECIAL_NOTEQUAL:
486 if (sval_cmp(left->max, right->min) < 0)
487 return 0;
488 if (sval_cmp(left->min, right->max) > 0)
489 return 0;
490 return 1;
491 default:
492 sm_msg("unhandled comparison %d\n", comparison);
493 return 0;
495 return 0;
/* Same as false_comparison_range_sval(); @left says which side @var is on. */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	return false_comparison_range_sval(val, comparison, var);
}
506 int possibly_true(struct expression *left, int comparison, struct expression *right)
508 struct range_list *rl_left, *rl_right;
509 struct data_range *tmp_left, *tmp_right;
511 if (!get_implied_rl(left, &rl_left))
512 return 1;
513 if (!get_implied_rl(right, &rl_right))
514 return 1;
516 FOR_EACH_PTR(rl_left, tmp_left) {
517 FOR_EACH_PTR(rl_right, tmp_right) {
518 if (true_comparison_range(tmp_left, comparison, tmp_right))
519 return 1;
520 } END_FOR_EACH_PTR(tmp_right);
521 } END_FOR_EACH_PTR(tmp_left);
522 return 0;
525 int possibly_false(struct expression *left, int comparison, struct expression *right)
527 struct range_list *rl_left, *rl_right;
528 struct data_range *tmp_left, *tmp_right;
530 if (!get_implied_rl(left, &rl_left))
531 return 1;
532 if (!get_implied_rl(right, &rl_right))
533 return 1;
535 FOR_EACH_PTR(rl_left, tmp_left) {
536 FOR_EACH_PTR(rl_right, tmp_right) {
537 if (false_comparison_range_sval(tmp_left, comparison, tmp_right))
538 return 1;
539 } END_FOR_EACH_PTR(tmp_right);
540 } END_FOR_EACH_PTR(tmp_left);
541 return 0;
544 int possibly_true_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
546 struct data_range *left_tmp, *right_tmp;
548 if (!left_ranges || !right_ranges)
549 return 1;
551 FOR_EACH_PTR(left_ranges, left_tmp) {
552 FOR_EACH_PTR(right_ranges, right_tmp) {
553 if (true_comparison_range(left_tmp, comparison, right_tmp))
554 return 1;
555 } END_FOR_EACH_PTR(right_tmp);
556 } END_FOR_EACH_PTR(left_tmp);
557 return 0;
560 int possibly_false_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
562 struct data_range *left_tmp, *right_tmp;
564 if (!left_ranges || !right_ranges)
565 return 1;
567 FOR_EACH_PTR(left_ranges, left_tmp) {
568 FOR_EACH_PTR(right_ranges, right_tmp) {
569 if (false_comparison_range_sval(left_tmp, comparison, right_tmp))
570 return 1;
571 } END_FOR_EACH_PTR(right_tmp);
572 } END_FOR_EACH_PTR(left_tmp);
573 return 0;
/* FIXME: the _rl here stands for right left so really it should be _lr */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	return possibly_true_rl(b, comparison, a);
}
/* Counterpart of possibly_true_rl_LR() for the false direction. */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	return possibly_false_rl(b, comparison, a);
}
/* Append @drange to the list directly, without any merge logic. */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/* Push a range list onto the stack. */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/* Pop and return the range list on top of the stack. */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *top;

	top = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return top;
}
/* Peek at the range list on top of the stack without popping it. */
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	return last_ptr_list((struct ptr_list *)rl_stack);
}
620 void filter_top_rl(struct range_list_stack **rl_stack, sval_t sval)
622 struct range_list *rl;
624 rl = pop_rl(rl_stack);
625 rl = remove_range(rl, sval, sval);
626 push_rl(rl_stack, rl);
629 static int sval_too_big(struct symbol *type, sval_t sval)
631 if (type_bits(type) == 64)
632 return 0;
633 if (sval.uvalue > ((1ULL << type_bits(type)) - 1))
634 return 1;
635 return 0;
638 static void add_range_t(struct symbol *type, struct range_list **rl, sval_t min, sval_t max)
640 /* If we're just adding a number, cast it and add it */
641 if (sval_cmp(min, max) == 0) {
642 add_range(rl, sval_cast(type, min), sval_cast(type, max));
643 return;
646 /* If the range is within the type range then add it */
647 if (sval_fits(type, min) && sval_fits(type, max)) {
648 add_range(rl, sval_cast(type, min), sval_cast(type, max));
649 return;
653 * If the range we are adding has more bits than the range type then
654 * add the whole range type. Eg:
655 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
656 * This isn't totally the right thing to do. We could be more granular.
658 if (sval_too_big(type, min) || sval_too_big(type, max)) {
659 add_range(rl, sval_type_min(type), sval_type_max(type));
660 return;
663 /* Cast negative values to high positive values */
664 if (sval_is_negative(min) && type_unsigned(type)) {
665 if (sval_is_positive(max)) {
666 if (sval_too_high(type, max)) {
667 add_range(rl, sval_type_min(type), sval_type_max(type));
668 return;
670 add_range(rl, sval_type_val(type, 0), sval_cast(type, max));
671 max = sval_type_max(type);
672 } else {
673 max = sval_cast(type, max);
675 min = sval_cast(type, min);
676 add_range(rl, min, max);
679 /* Cast high positive numbers to negative */
680 if (sval_unsigned(max) && sval_is_negative(sval_cast(type, max))) {
681 if (!sval_is_negative(sval_cast(type, min))) {
682 add_range(rl, sval_cast(type, min), sval_type_max(type));
683 min = sval_type_min(type);
684 } else {
685 min = sval_cast(type, min);
687 max = sval_cast(type, max);
688 add_range(rl, min, max);
691 return;
694 struct range_list *cast_rl(struct symbol *type, struct range_list *rl)
696 struct data_range *tmp;
697 struct range_list *ret = NULL;
699 if (!rl)
700 return NULL;
702 if (!type)
703 return clone_rl(rl);
705 FOR_EACH_PTR(rl, tmp) {
706 add_range_t(type, &ret, tmp->min, tmp->max);
707 } END_FOR_EACH_PTR(tmp);
709 if (!ret)
710 return alloc_whole_rl(type);
712 return ret;
715 struct range_list *rl_invert(struct range_list *orig)
717 struct range_list *ret = NULL;
718 struct data_range *tmp;
719 sval_t gap_min, abs_max, sval;
721 if (!orig)
722 return NULL;
724 gap_min = sval_type_min(rl_min(orig).type);
725 abs_max = sval_type_max(rl_max(orig).type);
727 FOR_EACH_PTR(orig, tmp) {
728 if (sval_cmp(tmp->min, gap_min) > 0) {
729 sval = sval_type_val(tmp->min.type, tmp->min.value - 1);
730 add_range(&ret, gap_min, sval);
732 gap_min = sval_type_val(tmp->max.type, tmp->max.value + 1);
733 if (sval_cmp(tmp->max, abs_max) == 0)
734 gap_min = abs_max;
735 } END_FOR_EACH_PTR(tmp);
737 if (sval_cmp(gap_min, abs_max) < 0)
738 add_range(&ret, gap_min, abs_max);
740 return ret;
743 struct range_list *rl_filter(struct range_list *rl, struct range_list *filter)
745 struct data_range *tmp;
747 FOR_EACH_PTR(filter, tmp) {
748 rl = remove_range(rl, tmp->min, tmp->max);
749 } END_FOR_EACH_PTR(tmp);
751 return rl;
754 struct range_list *rl_intersection(struct range_list *one, struct range_list *two)
756 if (!two)
757 return NULL;
758 two = rl_invert(two);
759 return rl_filter(one, two);
/* Free the list structure itself (the data_ranges are pool-allocated). */
void free_rl(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}
767 static void free_single_dinfo(struct data_info *dinfo)
769 if (dinfo->type == DATA_RANGE)
770 free_rl(&dinfo->value_ranges);
773 static void free_dinfos(struct allocation_blob *blob)
775 unsigned int size = sizeof(struct data_info);
776 unsigned int offset = 0;
778 while (offset < blob->offset) {
779 free_single_dinfo((struct data_info *)(blob->data + offset));
780 offset += size;
784 void free_data_info_allocs(void)
786 struct allocator_struct *desc = &data_info_allocator;
787 struct allocation_blob *blob = desc->blobs;
789 desc->blobs = NULL;
790 desc->allocations = 0;
791 desc->total_bytes = 0;
792 desc->useful_bytes = 0;
793 desc->freelist = NULL;
794 while (blob) {
795 struct allocation_blob *next = blob->next;
796 free_dinfos(blob);
797 blob_free(blob, desc->chunking);
798 blob = next;
800 clear_data_range_alloc();