db: move caller_info as close as possible to raw SQL
[smatch.git] / smatch_ranges.c
blob81ba561cc6e648bcefe809f79a8d27f7bf180196
1 /*
2 * sparse/smatch_ranges.c
4 * Copyright (C) 2009 Dan Carpenter.
6 * Licensed under the Open Software License version 1.1
8 */
#include <stdio.h>
#include <string.h>

#include "parse.h"
#include "smatch.h"
#include "smatch_extra.h"
#include "smatch_slist.h"
15 ALLOCATOR(data_info, "smatch extra data");
16 ALLOCATOR(data_range, "data range");
17 __DO_ALLOCATOR(struct data_range, sizeof(struct data_range), __alignof__(struct data_range),
18 "permanent ranges", perm_data_range);
20 char *show_rl(struct range_list *list)
22 struct data_range *tmp;
23 char full[256];
24 int i = 0;
26 full[0] = '\0';
27 full[255] = '\0';
28 FOR_EACH_PTR(list, tmp) {
29 if (i++)
30 strncat(full, ",", 254 - strlen(full));
31 if (sval_cmp(tmp->min, tmp->max) == 0) {
32 strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
33 continue;
35 strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
36 strncat(full, "-", 254 - strlen(full));
37 strncat(full, sval_to_str(tmp->max), 254 - strlen(full));
38 } END_FOR_EACH_PTR(tmp);
39 return alloc_sname(full);
42 static sval_t parse_val(struct symbol *type, char *c, char **endp)
44 char *start = c;
45 sval_t ret;
47 if (!strncmp(start, "max", 3)) {
48 ret = sval_type_max(type);
49 c += 3;
50 } else if (!strncmp(start, "u64max", 6)) {
51 ret = sval_type_val(type, ULLONG_MAX);
52 c += 6;
53 } else if (!strncmp(start, "s64max", 6)) {
54 ret = sval_type_val(type, LLONG_MAX);
55 c += 6;
56 } else if (!strncmp(start, "u32max", 6)) {
57 ret = sval_type_val(type, UINT_MAX);
58 c += 6;
59 } else if (!strncmp(start, "s32max", 6)) {
60 ret = sval_type_val(type, INT_MAX);
61 c += 6;
62 } else if (!strncmp(start, "u16max", 6)) {
63 ret = sval_type_val(type, USHRT_MAX);
64 c += 6;
65 } else if (!strncmp(start, "s16max", 6)) {
66 ret = sval_type_val(type, SHRT_MAX);
67 c += 6;
68 } else if (!strncmp(start, "min", 3)) {
69 ret = sval_type_min(type);
70 c += 3;
71 } else if (!strncmp(start, "s64min", 6)) {
72 ret = sval_type_val(type, LLONG_MIN);
73 c += 6;
74 } else if (!strncmp(start, "s32min", 6)) {
75 ret = sval_type_val(type, INT_MIN);
76 c += 6;
77 } else if (!strncmp(start, "s16min", 6)) {
78 ret = sval_type_val(type, SHRT_MIN);
79 c += 6;
80 } else {
81 ret = sval_type_val(type, strtoll(start, &c, 10));
83 *endp = c;
84 return ret;
87 void str_to_rl(struct symbol *type, char *value, struct range_list **rl)
89 sval_t min, max;
90 char *c;
92 if (!type)
93 type = &llong_ctype;
94 *rl = NULL;
96 c = value;
97 while (*c) {
98 if (*c == '(')
99 c++;
100 min = parse_val(type, c, &c);
101 if (*c == ')')
102 c++;
103 if (!*c) {
104 add_range(rl, min, min);
105 break;
107 if (*c == ',') {
108 add_range(rl, min, min);
109 c++;
110 continue;
112 if (*c != '-') {
113 sm_msg("debug XXX: trouble parsing %s ", value);
114 break;
116 c++;
117 if (*c == '(')
118 c++;
119 max = parse_val(type, c, &c);
120 add_range(rl, min, max);
121 if (*c == ')')
122 c++;
123 if (!*c)
124 break;
125 if (*c != ',') {
126 sm_msg("debug YYY: trouble parsing %s %s", value, c);
127 break;
129 c++;
132 *rl = cast_rl(type, *rl);
135 int is_whole_rl(struct range_list *rl)
137 struct data_range *drange;
139 if (ptr_list_empty(rl))
140 return 1;
141 drange = first_ptr_list((struct ptr_list *)rl);
142 if (sval_is_min(drange->min) && sval_is_max(drange->max))
143 return 1;
144 return 0;
147 sval_t rl_min(struct range_list *rl)
149 struct data_range *drange;
150 sval_t ret;
152 ret.type = &llong_ctype;
153 ret.value = LLONG_MIN;
154 if (ptr_list_empty(rl))
155 return ret;
156 drange = first_ptr_list((struct ptr_list *)rl);
157 return drange->min;
160 sval_t rl_max(struct range_list *rl)
162 struct data_range *drange;
163 sval_t ret;
165 ret.type = &llong_ctype;
166 ret.value = LLONG_MAX;
167 if (ptr_list_empty(rl))
168 return ret;
169 drange = last_ptr_list((struct ptr_list *)rl);
170 return drange->max;
173 static struct data_range *alloc_range_helper_sval(sval_t min, sval_t max, int perm)
175 struct data_range *ret;
177 if (perm)
178 ret = __alloc_perm_data_range(0);
179 else
180 ret = __alloc_data_range(0);
181 ret->min = min;
182 ret->max = max;
183 return ret;
186 struct data_range *alloc_range(sval_t min, sval_t max)
188 return alloc_range_helper_sval(min, max, 0);
191 struct data_range *alloc_range_perm(sval_t min, sval_t max)
193 return alloc_range_helper_sval(min, max, 1);
196 struct range_list *alloc_rl(sval_t min, sval_t max)
198 struct range_list *rl = NULL;
200 if (sval_cmp(min, max) > 0)
201 return alloc_whole_rl(min.type);
203 add_range(&rl, min, max);
204 return rl;
207 struct range_list *alloc_whole_rl(struct symbol *type)
209 if (!type || type_positive_bits(type) < 0)
210 type = &llong_ctype;
212 return alloc_rl(sval_type_min(type), sval_type_max(type));
215 void add_range(struct range_list **list, sval_t min, sval_t max)
217 struct data_range *tmp = NULL;
218 struct data_range *new = NULL;
219 int check_next = 0;
222 * FIXME: This has a problem merging a range_list like: min-0,3-max
223 * with a range like 1-2. You end up with min-2,3-max instead of
224 * just min-max.
226 FOR_EACH_PTR(*list, tmp) {
227 if (check_next) {
228 /* Sometimes we overlap with more than one range
229 so we have to delete or modify the next range. */
230 if (max.value + 1 == tmp->min.value) {
231 /* join 2 ranges here */
232 new->max = tmp->max;
233 DELETE_CURRENT_PTR(tmp);
234 return;
237 /* Doesn't overlap with the next one. */
238 if (sval_cmp(max, tmp->min) < 0)
239 return;
240 /* Partially overlaps with the next one. */
241 if (sval_cmp(max, tmp->max) < 0) {
242 tmp->min.value = max.value + 1;
243 return;
245 /* Completely overlaps with the next one. */
246 if (sval_cmp(max, tmp->max) >= 0) {
247 DELETE_CURRENT_PTR(tmp);
248 /* there could be more ranges to delete */
249 continue;
252 if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
253 /* join 2 ranges into a big range */
254 new = alloc_range(min, tmp->max);
255 REPLACE_CURRENT_PTR(tmp, new);
256 return;
258 if (sval_cmp(max, tmp->min) < 0) { /* new range entirely below */
259 new = alloc_range(min, max);
260 INSERT_CURRENT(new, tmp);
261 return;
263 if (sval_cmp(min, tmp->min) < 0) { /* new range partially below */
264 if (sval_cmp(max, tmp->max) < 0)
265 max = tmp->max;
266 else
267 check_next = 1;
268 new = alloc_range(min, max);
269 REPLACE_CURRENT_PTR(tmp, new);
270 if (!check_next)
271 return;
272 continue;
274 if (sval_cmp(max, tmp->max) <= 0) /* new range already included */
275 return;
276 if (sval_cmp(min, tmp->max) <= 0) { /* new range partially above */
277 min = tmp->min;
278 new = alloc_range(min, max);
279 REPLACE_CURRENT_PTR(tmp, new);
280 check_next = 1;
281 continue;
283 if (!sval_is_min(min) && min.value - 1 == tmp->max.value) {
284 /* join 2 ranges into a big range */
285 new = alloc_range(tmp->min, max);
286 REPLACE_CURRENT_PTR(tmp, new);
287 check_next = 1;
288 continue;
290 /* the new range is entirely above the existing ranges */
291 } END_FOR_EACH_PTR(tmp);
292 if (check_next)
293 return;
294 new = alloc_range(min, max);
295 add_ptr_list(list, new);
298 struct range_list *clone_rl(struct range_list *list)
300 struct data_range *tmp;
301 struct range_list *ret = NULL;
303 FOR_EACH_PTR(list, tmp) {
304 add_ptr_list(&ret, tmp);
305 } END_FOR_EACH_PTR(tmp);
306 return ret;
309 struct range_list *clone_rl_permanent(struct range_list *list)
311 struct data_range *tmp;
312 struct data_range *new;
313 struct range_list *ret = NULL;
315 FOR_EACH_PTR(list, tmp) {
316 new = alloc_range_perm(tmp->min, tmp->max);
317 add_ptr_list(&ret, new);
318 } END_FOR_EACH_PTR(tmp);
319 return ret;
322 struct range_list *rl_union(struct range_list *one, struct range_list *two)
324 struct data_range *tmp;
325 struct range_list *ret = NULL;
327 FOR_EACH_PTR(one, tmp) {
328 add_range(&ret, tmp->min, tmp->max);
329 } END_FOR_EACH_PTR(tmp);
330 FOR_EACH_PTR(two, tmp) {
331 add_range(&ret, tmp->min, tmp->max);
332 } END_FOR_EACH_PTR(tmp);
333 return ret;
336 struct range_list *remove_range(struct range_list *list, sval_t min, sval_t max)
338 struct data_range *tmp;
339 struct range_list *ret = NULL;
341 FOR_EACH_PTR(list, tmp) {
342 if (sval_cmp(tmp->max, min) < 0) {
343 add_range(&ret, tmp->min, tmp->max);
344 continue;
346 if (sval_cmp(tmp->min, max) > 0) {
347 add_range(&ret, tmp->min, tmp->max);
348 continue;
350 if (sval_cmp(tmp->min, min) >= 0 && sval_cmp(tmp->max, max) <= 0)
351 continue;
352 if (sval_cmp(tmp->min, min) >= 0) {
353 max.value++;
354 add_range(&ret, max, tmp->max);
355 } else if (sval_cmp(tmp->max, max) <= 0) {
356 min.value--;
357 add_range(&ret, tmp->min, min);
358 } else {
359 min.value--;
360 max.value++;
361 add_range(&ret, tmp->min, min);
362 add_range(&ret, max, tmp->max);
364 } END_FOR_EACH_PTR(tmp);
365 return ret;
368 int ranges_equiv(struct data_range *one, struct data_range *two)
370 if (!one && !two)
371 return 1;
372 if (!one || !two)
373 return 0;
374 if (sval_cmp(one->min, two->min) != 0)
375 return 0;
376 if (sval_cmp(one->max, two->max) != 0)
377 return 0;
378 return 1;
381 int rl_equiv(struct range_list *one, struct range_list *two)
383 struct data_range *one_range;
384 struct data_range *two_range;
386 if (one == two)
387 return 1;
389 PREPARE_PTR_LIST(one, one_range);
390 PREPARE_PTR_LIST(two, two_range);
391 for (;;) {
392 if (!one_range && !two_range)
393 return 1;
394 if (!ranges_equiv(one_range, two_range))
395 return 0;
396 NEXT_PTR_LIST(one_range);
397 NEXT_PTR_LIST(two_range);
399 FINISH_PTR_LIST(two_range);
400 FINISH_PTR_LIST(one_range);
402 return 1;
405 int true_comparison_range(struct data_range *left, int comparison, struct data_range *right)
407 switch (comparison) {
408 case '<':
409 case SPECIAL_UNSIGNED_LT:
410 if (sval_cmp(left->min, right->max) < 0)
411 return 1;
412 return 0;
413 case SPECIAL_UNSIGNED_LTE:
414 case SPECIAL_LTE:
415 if (sval_cmp(left->min, right->max) <= 0)
416 return 1;
417 return 0;
418 case SPECIAL_EQUAL:
419 if (sval_cmp(left->max, right->min) < 0)
420 return 0;
421 if (sval_cmp(left->min, right->max) > 0)
422 return 0;
423 return 1;
424 case SPECIAL_UNSIGNED_GTE:
425 case SPECIAL_GTE:
426 if (sval_cmp(left->max, right->min) >= 0)
427 return 1;
428 return 0;
429 case '>':
430 case SPECIAL_UNSIGNED_GT:
431 if (sval_cmp(left->max, right->min) > 0)
432 return 1;
433 return 0;
434 case SPECIAL_NOTEQUAL:
435 if (sval_cmp(left->min, left->max) != 0)
436 return 1;
437 if (sval_cmp(right->min, right->max) != 0)
438 return 1;
439 if (sval_cmp(left->min, right->min) != 0)
440 return 1;
441 return 0;
442 default:
443 sm_msg("unhandled comparison %d\n", comparison);
444 return 0;
446 return 0;
449 int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
451 if (left)
452 return true_comparison_range(var, comparison, val);
453 else
454 return true_comparison_range(val, comparison, var);
457 static int false_comparison_range_sval(struct data_range *left, int comparison, struct data_range *right)
459 switch (comparison) {
460 case '<':
461 case SPECIAL_UNSIGNED_LT:
462 if (sval_cmp(left->max, right->min) >= 0)
463 return 1;
464 return 0;
465 case SPECIAL_UNSIGNED_LTE:
466 case SPECIAL_LTE:
467 if (sval_cmp(left->max, right->min) > 0)
468 return 1;
469 return 0;
470 case SPECIAL_EQUAL:
471 if (sval_cmp(left->min, left->max) != 0)
472 return 1;
473 if (sval_cmp(right->min, right->max) != 0)
474 return 1;
475 if (sval_cmp(left->min, right->min) != 0)
476 return 1;
477 return 0;
478 case SPECIAL_UNSIGNED_GTE:
479 case SPECIAL_GTE:
480 if (sval_cmp(left->min, right->max) < 0)
481 return 1;
482 return 0;
483 case '>':
484 case SPECIAL_UNSIGNED_GT:
485 if (sval_cmp(left->min, right->max) <= 0)
486 return 1;
487 return 0;
488 case SPECIAL_NOTEQUAL:
489 if (sval_cmp(left->max, right->min) < 0)
490 return 0;
491 if (sval_cmp(left->min, right->max) > 0)
492 return 0;
493 return 1;
494 default:
495 sm_msg("unhandled comparison %d\n", comparison);
496 return 0;
498 return 0;
501 int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
503 if (left)
504 return false_comparison_range_sval(var, comparison, val);
505 else
506 return false_comparison_range_sval(val, comparison, var);
509 int possibly_true(struct expression *left, int comparison, struct expression *right)
511 struct range_list *rl_left, *rl_right;
512 struct data_range *tmp_left, *tmp_right;
514 if (!get_implied_rl(left, &rl_left))
515 return 1;
516 if (!get_implied_rl(right, &rl_right))
517 return 1;
519 FOR_EACH_PTR(rl_left, tmp_left) {
520 FOR_EACH_PTR(rl_right, tmp_right) {
521 if (true_comparison_range(tmp_left, comparison, tmp_right))
522 return 1;
523 } END_FOR_EACH_PTR(tmp_right);
524 } END_FOR_EACH_PTR(tmp_left);
525 return 0;
528 int possibly_false(struct expression *left, int comparison, struct expression *right)
530 struct range_list *rl_left, *rl_right;
531 struct data_range *tmp_left, *tmp_right;
533 if (!get_implied_rl(left, &rl_left))
534 return 1;
535 if (!get_implied_rl(right, &rl_right))
536 return 1;
538 FOR_EACH_PTR(rl_left, tmp_left) {
539 FOR_EACH_PTR(rl_right, tmp_right) {
540 if (false_comparison_range_sval(tmp_left, comparison, tmp_right))
541 return 1;
542 } END_FOR_EACH_PTR(tmp_right);
543 } END_FOR_EACH_PTR(tmp_left);
544 return 0;
547 int possibly_true_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
549 struct data_range *left_tmp, *right_tmp;
551 if (!left_ranges || !right_ranges)
552 return 1;
554 FOR_EACH_PTR(left_ranges, left_tmp) {
555 FOR_EACH_PTR(right_ranges, right_tmp) {
556 if (true_comparison_range(left_tmp, comparison, right_tmp))
557 return 1;
558 } END_FOR_EACH_PTR(right_tmp);
559 } END_FOR_EACH_PTR(left_tmp);
560 return 0;
563 int possibly_false_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
565 struct data_range *left_tmp, *right_tmp;
567 if (!left_ranges || !right_ranges)
568 return 1;
570 FOR_EACH_PTR(left_ranges, left_tmp) {
571 FOR_EACH_PTR(right_ranges, right_tmp) {
572 if (false_comparison_range_sval(left_tmp, comparison, right_tmp))
573 return 1;
574 } END_FOR_EACH_PTR(right_tmp);
575 } END_FOR_EACH_PTR(left_tmp);
576 return 0;
579 /* FIXME: the _rl here stands for right left so really it should be _lr */
580 int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
582 if (left)
583 return possibly_true_rl(a, comparison, b);
584 else
585 return possibly_true_rl(b, comparison, a);
588 int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
590 if (left)
591 return possibly_false_rl(a, comparison, b);
592 else
593 return possibly_false_rl(b, comparison, a);
596 void tack_on(struct range_list **list, struct data_range *drange)
598 add_ptr_list(list, drange);
601 void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
603 add_ptr_list(rl_stack, rl);
606 struct range_list *pop_rl(struct range_list_stack **rl_stack)
608 struct range_list *rl;
610 rl = last_ptr_list((struct ptr_list *)*rl_stack);
611 delete_ptr_list_last((struct ptr_list **)rl_stack);
612 return rl;
615 struct range_list *top_rl(struct range_list_stack *rl_stack)
617 struct range_list *rl;
619 rl = last_ptr_list((struct ptr_list *)rl_stack);
620 return rl;
623 void filter_top_rl(struct range_list_stack **rl_stack, sval_t sval)
625 struct range_list *rl;
627 rl = pop_rl(rl_stack);
628 rl = remove_range(rl, sval, sval);
629 push_rl(rl_stack, rl);
632 static int sval_too_big(struct symbol *type, sval_t sval)
634 if (type_bits(type) == 64)
635 return 0;
636 if (sval.uvalue > ((1ULL << type_bits(type)) - 1))
637 return 1;
638 return 0;
641 static void add_range_t(struct symbol *type, struct range_list **rl, sval_t min, sval_t max)
643 /* If we're just adding a number, cast it and add it */
644 if (sval_cmp(min, max) == 0) {
645 add_range(rl, sval_cast(type, min), sval_cast(type, max));
646 return;
649 /* If the range is within the type range then add it */
650 if (sval_fits(type, min) && sval_fits(type, max)) {
651 add_range(rl, sval_cast(type, min), sval_cast(type, max));
652 return;
656 * If the range we are adding has more bits than the range type then
657 * add the whole range type. Eg:
658 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
659 * This isn't totally the right thing to do. We could be more granular.
661 if (sval_too_big(type, min) || sval_too_big(type, max)) {
662 add_range(rl, sval_type_min(type), sval_type_max(type));
663 return;
666 /* Cast negative values to high positive values */
667 if (sval_is_negative(min) && type_unsigned(type)) {
668 if (sval_is_positive(max)) {
669 if (sval_too_high(type, max)) {
670 add_range(rl, sval_type_min(type), sval_type_max(type));
671 return;
673 add_range(rl, sval_type_val(type, 0), sval_cast(type, max));
674 max = sval_type_max(type);
675 } else {
676 max = sval_cast(type, max);
678 min = sval_cast(type, min);
679 add_range(rl, min, max);
682 /* Cast high positive numbers to negative */
683 if (sval_unsigned(max) && sval_is_negative(sval_cast(type, max))) {
684 if (!sval_is_negative(sval_cast(type, min))) {
685 add_range(rl, sval_cast(type, min), sval_type_max(type));
686 min = sval_type_min(type);
687 } else {
688 min = sval_cast(type, min);
690 max = sval_cast(type, max);
691 add_range(rl, min, max);
694 return;
697 struct range_list *cast_rl(struct symbol *type, struct range_list *rl)
699 struct data_range *tmp;
700 struct range_list *ret = NULL;
702 if (!rl)
703 return NULL;
705 if (!type)
706 return clone_rl(rl);
708 FOR_EACH_PTR(rl, tmp) {
709 add_range_t(type, &ret, tmp->min, tmp->max);
710 } END_FOR_EACH_PTR(tmp);
712 if (!ret)
713 return alloc_whole_rl(type);
715 return ret;
718 struct range_list *rl_invert(struct range_list *orig)
720 struct range_list *ret = NULL;
721 struct data_range *tmp;
722 sval_t gap_min, abs_max, sval;
724 if (!orig)
725 return NULL;
727 gap_min = sval_type_min(rl_min(orig).type);
728 abs_max = sval_type_max(rl_max(orig).type);
730 FOR_EACH_PTR(orig, tmp) {
731 if (sval_cmp(tmp->min, gap_min) > 0) {
732 sval = sval_type_val(tmp->min.type, tmp->min.value - 1);
733 add_range(&ret, gap_min, sval);
735 gap_min = sval_type_val(tmp->max.type, tmp->max.value + 1);
736 if (sval_cmp(tmp->max, abs_max) == 0)
737 gap_min = abs_max;
738 } END_FOR_EACH_PTR(tmp);
740 if (sval_cmp(gap_min, abs_max) < 0)
741 add_range(&ret, gap_min, abs_max);
743 return ret;
746 struct range_list *rl_filter(struct range_list *rl, struct range_list *filter)
748 struct data_range *tmp;
750 FOR_EACH_PTR(filter, tmp) {
751 rl = remove_range(rl, tmp->min, tmp->max);
752 } END_FOR_EACH_PTR(tmp);
754 return rl;
757 struct range_list *rl_intersection(struct range_list *one, struct range_list *two)
759 if (!two)
760 return NULL;
761 two = rl_invert(two);
762 return rl_filter(one, two);
765 void free_rl(struct range_list **rlist)
767 __free_ptr_list((struct ptr_list **)rlist);
770 static void free_single_dinfo(struct data_info *dinfo)
772 free_rl(&dinfo->value_ranges);
775 static void free_dinfos(struct allocation_blob *blob)
777 unsigned int size = sizeof(struct data_info);
778 unsigned int offset = 0;
780 while (offset < blob->offset) {
781 free_single_dinfo((struct data_info *)(blob->data + offset));
782 offset += size;
786 void free_data_info_allocs(void)
788 struct allocator_struct *desc = &data_info_allocator;
789 struct allocation_blob *blob = desc->blobs;
791 desc->blobs = NULL;
792 desc->allocations = 0;
793 desc->total_bytes = 0;
794 desc->useful_bytes = 0;
795 desc->freelist = NULL;
796 while (blob) {
797 struct allocation_blob *next = blob->next;
798 free_dinfos(blob);
799 blob_free(blob, desc->chunking);
800 blob = next;
802 clear_data_range_alloc();