/*
 * Copyright (C) 2009 Dan Carpenter.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
 */
20 #include "smatch_extra.h"
21 #include "smatch_slist.h"
/*
 * Allocator pools for the range machinery.  data_info and data_range use
 * the project's ALLOCATOR() macro; perm_data_range is a separate
 * "permanent ranges" pool used by alloc_range_perm() below.
 */
ALLOCATOR(data_info, "smatch extra data");
ALLOCATOR(data_range, "data range");
__DO_ALLOCATOR(struct data_range, sizeof(struct data_range),
	       __alignof__(struct data_range), "permanent ranges", perm_data_range);
/*
 * show_rl() - render a range list as a human readable string such as
 * "0-10,12,100-max": entries are comma separated; a range whose min
 * equals its max prints as a single value, otherwise as "min-max".
 *
 * NOTE(review): this excerpt is incomplete -- the local buffer
 * declaration, braces and early lines were lost in extraction.  The
 * strncat() size argument "254 - strlen(full)" presumably assumes a
 * 256-byte buffer -- confirm against the full file; it can also
 * underflow if strlen(full) ever exceeds 254.
 */
char *show_rl(struct range_list *list)
	struct data_range *tmp;

	FOR_EACH_PTR(list, tmp) {
		/* separator before an entry (first-entry guard elided) */
		strncat(full, ",", 254 - strlen(full));
		if (sval_cmp(tmp->min, tmp->max) == 0) {
			/* degenerate range: print the single value */
			strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
		/* (elided else branch) print "min-max" */
		strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
		strncat(full, "-", 254 - strlen(full));
		strncat(full, sval_to_str(tmp->max), 254 - strlen(full));
	} END_FOR_EACH_PTR(tmp);
	/* hand the local buffer back via the allocator's string copy */
	return alloc_sname(full);
/*
 * str_to_comparison_arg_helper() - parse a comparison key such as
 * "[<=$4]" out of @str: store the operator in *@comparison, look up the
 * referenced call argument in *@arg, and (presumably, via the elided
 * tail) report the parse end through @endp when it is non-NULL.
 *
 * NOTE(review): excerpt incomplete -- the leading validation, the '<'
 * branch head and the success/failure returns are elided.
 */
static int str_to_comparison_arg_helper(const char *str,
		struct expression *call, int *comparison,
		struct expression **arg, char **endp)
	char *c = (char *)str;

		/* (elided branch head, presumably "<=" ) */
		*comparison = SPECIAL_LTE;
	} else if (*c == '=') {
		*comparison = SPECIAL_EQUAL;
	} else if (*c == '>') {
		*comparison = SPECIAL_GTE;
	} else if (*c == '!') {
		*comparison = SPECIAL_NOTEQUAL;

	/* the parameter number follows the operator, e.g. the 4 in "[<=$4]" */
	param = strtoll(c, &c, 10);
	c++; /* skip the ']' character */
	*arg = get_argument_from_call_expr(call->args, param);

/*
 * str_to_comparison_arg() - public wrapper around the helper with no
 * end-pointer reporting.
 */
int str_to_comparison_arg(const char *str, struct expression *call, int *comparison, struct expression **arg)
	return str_to_comparison_arg_helper(str, call, comparison, arg, NULL);
/*
 * get_val_from_key() - evaluate a "[<$n]"-style key against the actual
 * arguments of @call.  Start from the extreme of @type (max when
 * @use_max, else min) and tighten it using the implied max/min of the
 * referenced argument; a strict '<' or '>' adjusts the bound via
 * sval_binop() (presumably by one -- the reassignment of tmp is elided;
 * confirm against the full file).  The store through @sval and the
 * return statements are elided from this excerpt.
 */
static int get_val_from_key(int use_max, struct symbol *type, char *c, struct expression *call, char **endp, sval_t *sval)
	struct expression *arg;

		/* default result when no tighter bound can be derived */
		ret = sval_type_max(type);
		ret = sval_type_min(type);

	if (!str_to_comparison_arg_helper(c, call, &comparison, &arg, endp)) {

	if (use_max && get_implied_max(arg, &tmp)) {
		if (comparison == '<') {
			/* strict less-than: bound lies below the implied max */
			ret = sval_binop(ret, '-', tmp);
	if (!use_max && get_implied_min(arg, &tmp)) {
		if (comparison == '>') {
			/* strict greater-than: bound lies above the implied min */
			ret = sval_binop(ret, '+', tmp);

/*
 * add_one()/sub_one(): bodies elided in this excerpt; by name and by
 * their use in filter_by_comparison() below they presumably return
 * sval with its value incremented/decremented by one -- confirm.
 */
static sval_t add_one(sval_t sval)

static sval_t sub_one(sval_t sval)
/*
 * filter_by_comparison() - narrow @start_rl according to a trailing
 * comparison key: parse the operator and argument, take the argument's
 * implied range, and remove the values of @start_rl that the comparison
 * rules out.  Both sides are first cast to a common type wide enough
 * for either operand (and at least int); the result is cast back to
 * @start_rl's own type before returning.
 *
 * NOTE(review): excerpt incomplete -- most case labels, the early
 * returns and the declarations of comparison/min/max are elided; the
 * remove_range() calls below belong to the elided '<', '<=', '==',
 * '>=' and '>' cases (attribution presumed from the bound arithmetic).
 */
static struct range_list *filter_by_comparison(char *c, struct expression *call, char **endp, struct range_list *start_rl)
	struct expression *arg;
	struct symbol *cast_type;
	struct range_list *left_orig, *right_orig;
	struct range_list *ret_rl = start_rl;

	if (!str_to_comparison_arg_helper(c, call, &comparison, &arg, endp))
	if (!get_implied_rl(arg, &right_orig))

	/* pick the wider of the two range types, with int as the floor */
	cast_type = rl_type(start_rl);
	if (sval_type_max(rl_type(start_rl)).uvalue < sval_type_max(rl_type(right_orig)).uvalue)
		cast_type = rl_type(right_orig);
	if (sval_type_max(cast_type).uvalue < INT_MAX)
		cast_type = &int_ctype;

	min = sval_type_min(cast_type);
	max = sval_type_max(cast_type);
	left_orig = cast_rl(cast_type, start_rl);
	right_orig = cast_rl(cast_type, right_orig);

	switch (comparison) {
		/* (elided case, presumably strict '<') drop [right.max, max] */
		ret_rl = remove_range(left_orig, rl_max(right_orig), max);
		/* (elided case, presumably '<=') drop everything above right.max */
		if (!sval_is_max(rl_max(right_orig)))
			ret_rl = remove_range(left_orig, add_one(rl_max(right_orig)), max);
		/* (elided case, presumably '==') keep only right's own span */
		if (!sval_is_max(rl_max(right_orig)))
			ret_rl = remove_range(ret_rl, add_one(rl_max(right_orig)), max);
		if (!sval_is_min(rl_min(right_orig)))
			ret_rl = remove_range(ret_rl, min, sub_one(rl_min(right_orig)));
		/* (elided case, presumably '>=') drop everything below right.min */
		if (!sval_is_min(rl_min(right_orig)))
			ret_rl = remove_range(left_orig, min, sub_one(rl_min(right_orig)));
		/* (elided case, presumably strict '>') drop [min, right.min] */
		ret_rl = remove_range(left_orig, min, rl_min(right_orig));
	case SPECIAL_NOTEQUAL:
		/* "!=" can only carve a hole when the right side is one value */
		if (sval_cmp(rl_min(right_orig), rl_max(right_orig)) == 0)
			ret_rl = remove_range(left_orig, rl_min(right_orig), rl_min(right_orig));

	/* restore the caller's type */
	return cast_rl(rl_type(start_rl), ret_rl);
/*
 * parse_val() - parse one range endpoint from a textual range.
 * Recognizes the symbolic names "max"/"min", their fixed-width variants
 * ("u64max" ... "s16min"), a "[...]" comparison key (delegated to
 * get_val_from_key(), always asking for the max bound here), and plain
 * decimal numbers via strtoll().  @use_max presumably distinguishes the
 * low vs high endpoint for callers; the declarations of start/ret, the
 * *endp store and the return are elided from this excerpt.
 */
static sval_t parse_val(int use_max, struct expression *call, struct symbol *type, char *c, char **endp)
	if (!strncmp(start, "max", 3)) {
		ret = sval_type_max(type);
	} else if (!strncmp(start, "u64max", 6)) {
		ret = sval_type_val(type, ULLONG_MAX);
	} else if (!strncmp(start, "s64max", 6)) {
		ret = sval_type_val(type, LLONG_MAX);
	} else if (!strncmp(start, "u32max", 6)) {
		ret = sval_type_val(type, UINT_MAX);
	} else if (!strncmp(start, "s32max", 6)) {
		ret = sval_type_val(type, INT_MAX);
	} else if (!strncmp(start, "u16max", 6)) {
		ret = sval_type_val(type, USHRT_MAX);
	} else if (!strncmp(start, "s16max", 6)) {
		ret = sval_type_val(type, SHRT_MAX);
	} else if (!strncmp(start, "min", 3)) {
		ret = sval_type_min(type);
	} else if (!strncmp(start, "s64min", 6)) {
		ret = sval_type_val(type, LLONG_MIN);
	} else if (!strncmp(start, "s32min", 6)) {
		ret = sval_type_val(type, INT_MIN);
	} else if (!strncmp(start, "s16min", 6)) {
		ret = sval_type_val(type, SHRT_MIN);
	} else if (start[0] == '[') {
		/* this parses [==p0] comparisons */
		get_val_from_key(1, type, start, call, &c, &ret);
		/* (elided else) plain decimal literal */
		ret = sval_type_val(type, strtoll(start, &c, 10));

/*
 * jump_to_call_math() - scan @value for a '[' annex; the elided returns
 * presumably yield a pointer past the '[' unless the annex is a
 * comparison marker ('<', '=', '>', '!') rather than call math.
 */
static char *jump_to_call_math(char *value)
	while (*c && *c != '[')

	if (*c == '<' || *c == '=' || *c == '>' || *c == '!')
/*
 * str_to_rl_helper() - central parser turning a textual range such as
 * "0-10,12,100-max[<$2]" into a range_list in *@rl.  Handles, in order:
 * the literal string "empty"; "[==$n]" keys that copy the implied range
 * of a call argument; call-math annexes via jump_to_call_math() /
 * parse_call_math_rl(); a comma separated list of values and "a-b"
 * ranges parsed with parse_val(); then an optional trailing comparison
 * applied with filter_by_comparison().  The result is always cast to
 * @type at the end.  @call may be NULL (see str_to_rl()), in which case
 * the argument-based keys cannot resolve.
 *
 * NOTE(review): excerpt incomplete -- returns, the declarations of
 * c/min/max/comparison/call_math and the ','/'-' separator checks
 * between the add_range() calls are elided.
 */
static void str_to_rl_helper(struct expression *call, struct symbol *type, char *value, struct range_list **rl)
	if (strcmp(value, "empty") == 0)

	if (strncmp(value, "[==$", 4) == 0) {
		struct expression *arg;

		if (!str_to_comparison_arg(value, call, &comparison, &arg))
		if (!get_implied_rl(arg, rl))

	call_math = jump_to_call_math(value);
	if (call_math && parse_call_math_rl(call, call_math, rl))

	min = sval_type_min(type);
	max = sval_type_max(type);

	while (*c != '\0' && *c != '[') {
		min = parse_val(0, call, type, c, &c);
		if (*c == '\0' || *c == '[') {
			/* trailing single value */
			add_range(rl, min, min);
		/* (elided ',' case) single value inside a list */
			add_range(rl, min, min);
		/* (elided: anything but '-' here is a parse error) */
			sm_msg("debug XXX: trouble parsing %s c = %s", value, c);
		max = parse_val(1, call, type, c, &c);
		add_range(rl, min, max);

	/*
	 * For now if we already tried to handle the call math and couldn't
	 * figure it out then bail.
	 */
	if (jump_to_call_math(c) == c + 1)

	*rl = filter_by_comparison(c, call, &c, *rl);
	*rl = cast_rl(type, *rl);

/* str_to_rl() - parse with no call context; argument keys unavailable. */
void str_to_rl(struct symbol *type, char *value, struct range_list **rl)
	return str_to_rl_helper(NULL, type, value, rl);

/* call_results_to_rl() - parse a returned-range string against @expr's call. */
void call_results_to_rl(struct expression *expr, struct symbol *type, char *value, struct range_list **rl)
	return str_to_rl_helper(strip_expr(expr), type, value, rl);
/*
 * is_whole_rl() - true when @rl is a single range covering the entire
 * type (type-min .. type-max).  Empty-list handling and the returns
 * are elided from this excerpt.
 */
int is_whole_rl(struct range_list *rl)
	struct data_range *drange;

	if (ptr_list_empty(rl))
	drange = first_ptr_list((struct ptr_list *)rl);
	if (sval_is_min(drange->min) && sval_is_max(drange->max))

/*
 * rl_min() - the smallest value in @rl: the ->min of the first range
 * (the list is kept in ascending order by add_range()).  An empty list
 * falls back to LLONG_MIN typed as llong_ctype.
 */
sval_t rl_min(struct range_list *rl)
	struct data_range *drange;

	ret.type = &llong_ctype;
	ret.value = LLONG_MIN;
	if (ptr_list_empty(rl))
	drange = first_ptr_list((struct ptr_list *)rl);

/*
 * rl_max() - the largest value in @rl: the ->max of the last range;
 * LLONG_MAX fallback for an empty list.
 */
sval_t rl_max(struct range_list *rl)
	struct data_range *drange;

	ret.type = &llong_ctype;
	ret.value = LLONG_MAX;
	if (ptr_list_empty(rl))
	drange = last_ptr_list((struct ptr_list *)rl);

/*
 * rl_to_sval() - collapse @rl to a single value when its overall min
 * equals its max; the store through @sval and the 0/1 returns are
 * elided here.
 */
int rl_to_sval(struct range_list *rl, sval_t *sval)
	if (sval_cmp(min, max) != 0)

/* rl_type() - the C type of the list, taken from its minimum value. */
struct symbol *rl_type(struct range_list *rl)
	return rl_min(rl).type;
/*
 * alloc_range_helper_sval() - allocate a data_range for [min,max].
 * @perm selects the permanent pool (perm_data_range, declared above)
 * over the regular per-pass pool; the ->min/->max assignments and the
 * return are elided from this excerpt.
 */
static struct data_range *alloc_range_helper_sval(sval_t min, sval_t max, int perm)
	struct data_range *ret;

		ret = __alloc_perm_data_range(0);
		ret = __alloc_data_range(0);

/* alloc_range() - range from the regular data_range pool. */
struct data_range *alloc_range(sval_t min, sval_t max)
	return alloc_range_helper_sval(min, max, 0);

/* alloc_range_perm() - range from the permanent pool. */
struct data_range *alloc_range_perm(sval_t min, sval_t max)
	return alloc_range_helper_sval(min, max, 1);

/*
 * alloc_rl() - build a one-range list [min,max].  An inverted pair
 * (min > max) is treated as unknown: the whole range of min's type.
 */
struct range_list *alloc_rl(sval_t min, sval_t max)
	struct range_list *rl = NULL;

	if (sval_cmp(min, max) > 0)
		return alloc_whole_rl(min.type);
	add_range(&rl, min, max);

/*
 * alloc_whole_rl() - the full value range of @type.  Elided lines
 * presumably substitute a sane default for missing/negative-width
 * types and for arrays before the final call.
 */
struct range_list *alloc_whole_rl(struct symbol *type)
	if (!type || type_positive_bits(type) < 0)
	if (type->type == SYM_ARRAY)
	return alloc_rl(sval_type_min(type), sval_type_max(type));
/*
 * add_range() - insert [min,max] into the sorted range list *@list,
 * merging with any overlapping or adjacent existing ranges so the list
 * stays sorted and minimal.  If the loop finds no place for the new
 * range it is appended at the end.
 *
 * NOTE(review): excerpt incomplete -- the returns/continues linking
 * these branches and a couple of assignments (e.g. widening min/max
 * from the replaced range) are elided.
 */
void add_range(struct range_list **list, sval_t min, sval_t max)
	struct data_range *tmp = NULL;
	struct data_range *new = NULL;

	/*
	 * FIXME: This has a problem merging a range_list like: min-0,3-max
	 * with a range like 1-2.  You end up with min-2,3-max instead of
	 * min-max.
	 */
	FOR_EACH_PTR(*list, tmp) {
		/* Sometimes we overlap with more than one range
		   so we have to delete or modify the next range. */
		if (max.value + 1 == tmp->min.value) {
			/* join 2 ranges here */
			DELETE_CURRENT_PTR(tmp);
		/* Doesn't overlap with the next one. */
		if (sval_cmp(max, tmp->min) < 0)
		/* Partially overlaps with the next one. */
		if (sval_cmp(max, tmp->max) < 0) {
			tmp->min.value = max.value + 1;
		/* Completely overlaps with the next one. */
		if (sval_cmp(max, tmp->max) >= 0) {
			DELETE_CURRENT_PTR(tmp);
			/* there could be more ranges to delete */
		if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(min, tmp->max);
			REPLACE_CURRENT_PTR(tmp, new);
		if (sval_cmp(max, tmp->min) < 0) { /* new range entirely below */
			new = alloc_range(min, max);
			INSERT_CURRENT(new, tmp);
		if (sval_cmp(min, tmp->min) < 0) { /* new range partially below */
			if (sval_cmp(max, tmp->max) < 0)
			/* (elided: widen one endpoint from tmp before replacing) */
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
		if (sval_cmp(max, tmp->max) <= 0) /* new range already included */
		if (sval_cmp(min, tmp->max) <= 0) { /* new range partially above */
			/* (elided: min presumably taken from tmp->min first) */
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
		if (!sval_is_min(min) && min.value - 1 == tmp->max.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(tmp->min, max);
			REPLACE_CURRENT_PTR(tmp, new);
		/* the new range is entirely above the existing ranges */
	} END_FOR_EACH_PTR(tmp);

	new = alloc_range(min, max);
	add_ptr_list(list, new);
/*
 * clone_rl() - shallow copy: a new list backbone sharing the same
 * data_range entries as @list.  (Return of ret is elided.)
 */
struct range_list *clone_rl(struct range_list *list)
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		add_ptr_list(&ret, tmp);
	} END_FOR_EACH_PTR(tmp);

/*
 * clone_rl_permanent() - deep copy whose entries come from the
 * permanent data_range pool (alloc_range_perm()).
 */
struct range_list *clone_rl_permanent(struct range_list *list)
	struct data_range *tmp;
	struct data_range *new;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		new = alloc_range_perm(tmp->min, tmp->max);
		add_ptr_list(&ret, new);
	} END_FOR_EACH_PTR(tmp);

/*
 * rl_union() - merge two lists; add_range() takes care of overlap and
 * adjacency merging, so simply re-add every range from both inputs.
 */
struct range_list *rl_union(struct range_list *one, struct range_list *two)
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(one, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
	FOR_EACH_PTR(two, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
/*
 * remove_range() - return a copy of @list with the span [min,max]
 * carved out.  For each source range: keep it whole when disjoint,
 * drop it when fully covered, trim one side on partial overlap, or
 * split it in two when the removed span lies strictly inside it.
 *
 * NOTE(review): excerpt incomplete -- the continues/returns and the
 * +/-1 boundary adjustments that presumably precede the trimmed
 * add_range() calls are elided; confirm exact boundary handling
 * against the full file.
 */
struct range_list *remove_range(struct range_list *list, sval_t min, sval_t max)
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		/* entirely below the removed span: keep as-is */
		if (sval_cmp(tmp->max, min) < 0) {
			add_range(&ret, tmp->min, tmp->max);
		/* entirely above the removed span: keep as-is */
		if (sval_cmp(tmp->min, max) > 0) {
			add_range(&ret, tmp->min, tmp->max);
		/* fully covered: drop the range */
		if (sval_cmp(tmp->min, min) >= 0 && sval_cmp(tmp->max, max) <= 0)
		/* low side removed: keep the upper remainder */
		if (sval_cmp(tmp->min, min) >= 0) {
			add_range(&ret, max, tmp->max);
		/* high side removed: keep the lower remainder */
		} else if (sval_cmp(tmp->max, max) <= 0) {
			add_range(&ret, tmp->min, min);
		/* (elided else) removed span strictly inside: split in two */
			add_range(&ret, tmp->min, min);
			add_range(&ret, max, tmp->max);
	} END_FOR_EACH_PTR(tmp);
/*
 * ranges_equiv() - two data_ranges are equivalent when both min and
 * max compare equal.  NULL handling and the 0/1 returns are elided.
 */
int ranges_equiv(struct data_range *one, struct data_range *two)
	if (sval_cmp(one->min, two->min) != 0)
	if (sval_cmp(one->max, two->max) != 0)

/*
 * rl_equiv() - walk both lists in lockstep with the PREPARE/NEXT
 * ptr_list cursors; the lists are equivalent iff they end together and
 * every aligned pair passes ranges_equiv().  The surrounding loop and
 * returns are elided from this excerpt.
 */
int rl_equiv(struct range_list *one, struct range_list *two)
	struct data_range *one_range;
	struct data_range *two_range;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
		/* both cursors exhausted together => equivalent */
		if (!one_range && !two_range)
		if (!ranges_equiv(one_range, two_range))
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);
/*
 * true_comparison_range() - can "left <cmp> right" be TRUE for some
 * pair of values drawn from the two ranges?  Signed and unsigned
 * variants of each operator share one sval_cmp() test; unknown
 * operators hit the sm_msg() warning at the bottom.
 *
 * NOTE(review): excerpt incomplete -- the plain-operator case labels
 * ('<', SPECIAL_LTE, SPECIAL_EQUAL, ...) and the return 0/1 lines are
 * elided; each visible test pairs with an elided case label above it.
 */
int true_comparison_range(struct data_range *left, int comparison, struct data_range *right)
	switch (comparison) {
	case SPECIAL_UNSIGNED_LT:
		/* some left value below right's max => '<' can hold */
		if (sval_cmp(left->min, right->max) < 0)
	case SPECIAL_UNSIGNED_LTE:
		if (sval_cmp(left->min, right->max) <= 0)
		/* (elided equality case) ranges must actually intersect */
		if (sval_cmp(left->max, right->min) < 0)
		if (sval_cmp(left->min, right->max) > 0)
	case SPECIAL_UNSIGNED_GTE:
		if (sval_cmp(left->max, right->min) >= 0)
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->max, right->min) > 0)
	case SPECIAL_NOTEQUAL:
		/* "!=" is possible unless both sides are the same single value */
		if (sval_cmp(left->min, left->max) != 0)
		if (sval_cmp(right->min, right->max) != 0)
		if (sval_cmp(left->min, right->min) != 0)
	sm_msg("unhandled comparison %d\n", comparison);

/*
 * true_comparison_range_LR() - same test with @var placed on the side
 * selected by @left (the branch condition lines are elided).
 */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
		return true_comparison_range(var, comparison, val);
		return true_comparison_range(val, comparison, var);

/*
 * false_comparison_range_sval() - mirror image: can the comparison be
 * FALSE for some pair of values?  Same elision caveats as above.
 */
static int false_comparison_range_sval(struct data_range *left, int comparison, struct data_range *right)
	switch (comparison) {
	case SPECIAL_UNSIGNED_LT:
		/* some left value not below right's min => '<' can fail */
		if (sval_cmp(left->max, right->min) >= 0)
	case SPECIAL_UNSIGNED_LTE:
		if (sval_cmp(left->max, right->min) > 0)
		/* (elided equality case) anything but one identical value can fail */
		if (sval_cmp(left->min, left->max) != 0)
		if (sval_cmp(right->min, right->max) != 0)
		if (sval_cmp(left->min, right->min) != 0)
	case SPECIAL_UNSIGNED_GTE:
		if (sval_cmp(left->min, right->max) < 0)
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->min, right->max) <= 0)
	case SPECIAL_NOTEQUAL:
		/* "!=" can fail only when the ranges can coincide */
		if (sval_cmp(left->max, right->min) < 0)
		if (sval_cmp(left->min, right->max) > 0)
	sm_msg("unhandled comparison %d\n", comparison);

/* false_comparison_range_LR() - side-selecting wrapper, as above. */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
		return false_comparison_range_sval(var, comparison, val);
		return false_comparison_range_sval(val, comparison, var);
/*
 * possibly_true() - expression-level query: can the comparison hold for
 * some pair of values from the implied ranges of @left and @right?
 * The result when an implied range is unavailable is elided --
 * presumably "can't rule it out"; confirm against the full file.
 */
int possibly_true(struct expression *left, int comparison, struct expression *right)
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;

	if (!get_implied_rl(left, &rl_left))
	if (!get_implied_rl(right, &rl_right))

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (true_comparison_range(tmp_left, comparison, tmp_right))
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);

/* possibly_false() - mirror of possibly_true() for the false branch. */
int possibly_false(struct expression *left, int comparison, struct expression *right)
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;

	if (!get_implied_rl(left, &rl_left))
	if (!get_implied_rl(right, &rl_right))

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (false_comparison_range_sval(tmp_left, comparison, tmp_right))
		} END_FOR_EACH_PTR(tmp_left);
	} END_FOR_EACH_PTR(tmp_left);

/*
 * possibly_true_rl() - same query on two pre-computed range lists;
 * any pair of sub-ranges satisfying the comparison suffices.
 */
int possibly_true_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
	struct data_range *left_tmp, *right_tmp;

	if (!left_ranges || !right_ranges)

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (true_comparison_range(left_tmp, comparison, right_tmp))
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);

/* possibly_false_rl() - false-branch counterpart of possibly_true_rl(). */
int possibly_false_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
	struct data_range *left_tmp, *right_tmp;

	if (!left_ranges || !right_ranges)

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (false_comparison_range_sval(left_tmp, comparison, right_tmp))
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);

/* FIXME: the _rl here stands for right left so really it should be _lr */
/* possibly_true_rl_LR() - @left selects which operand is @a vs @b. */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
		return possibly_true_rl(a, comparison, b);
		return possibly_true_rl(b, comparison, a);

/* possibly_false_rl_LR() - side-selecting wrapper for the false branch. */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
		return possibly_false_rl(a, comparison, b);
		return possibly_false_rl(b, comparison, a);
/*
 * rl_has_sval() - membership test: true when some range in @rl
 * contains @sval (the 0/1 returns are elided).
 */
int rl_has_sval(struct range_list *rl, sval_t sval)
	struct data_range *tmp;

	FOR_EACH_PTR(rl, tmp) {
		if (sval_cmp(tmp->min, sval) <= 0 &&
		    sval_cmp(tmp->max, sval) >= 0)
	} END_FOR_EACH_PTR(tmp);

/*
 * tack_on() - append @drange without any sorting/merging (contrast
 * with add_range(), which keeps the list ordered).
 */
void tack_on(struct range_list **list, struct data_range *drange)
	add_ptr_list(list, drange);

/* push_rl() - push a range list onto the stack (list used as a stack). */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
	add_ptr_list(rl_stack, rl);

/* pop_rl() - remove and return the top of the stack. */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);

/* top_rl() - peek at the top of the stack without popping. */
struct range_list *top_rl(struct range_list_stack *rl_stack)
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);

/* filter_top_rl() - remove the single value @sval from the stack top. */
void filter_top_rl(struct range_list_stack **rl_stack, sval_t sval)
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = remove_range(rl, sval, sval);
	push_rl(rl_stack, rl);
/*
 * sval_too_big() - does @sval's raw bit pattern need more than
 * type_bits(@type) bits?  64-bit types short-circuit first (shifting
 * 1ULL by 64 would be undefined); the returns are elided.
 */
static int sval_too_big(struct symbol *type, sval_t sval)
	if (type_bits(type) == 64)
	if (sval.uvalue > ((1ULL << type_bits(type)) - 1))

/*
 * add_range_t() - add [min,max] to *@rl after casting the endpoints to
 * @type, taking care of the wrap-around cases where the cast changes a
 * value's sign or the values do not fit the type at all.
 *
 * NOTE(review): excerpt incomplete -- the returns ending each handled
 * case and a couple of else keywords are elided.
 */
static void add_range_t(struct symbol *type, struct range_list **rl, sval_t min, sval_t max)
	/* If we're just adding a number, cast it and add it */
	if (sval_cmp(min, max) == 0) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));

	/* If the range is within the type range then add it */
	if (sval_fits(type, min) && sval_fits(type, max)) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));

	/*
	 * If the range we are adding has more bits than the range type then
	 * add the whole range type.  Eg:
	 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
	 * This isn't totally the right thing to do.  We could be more granular.
	 */
	if (sval_too_big(type, min) || sval_too_big(type, max)) {
		add_range(rl, sval_type_min(type), sval_type_max(type));

	/* Cast negative values to high positive values */
	if (sval_is_negative(min) && type_unsigned(type)) {
		if (sval_is_positive(max)) {
			if (sval_too_high(type, max)) {
				/* spans the whole unsigned domain */
				add_range(rl, sval_type_min(type), sval_type_max(type));
			/* low half: [0, max]; negative part wraps to the top */
			add_range(rl, sval_type_val(type, 0), sval_cast(type, max));
			max = sval_type_max(type);
			/* (elided else) max was already negative too */
			max = sval_cast(type, max);
		min = sval_cast(type, min);
		add_range(rl, min, max);

	/* Cast high positive numbers to negative */
	if (sval_unsigned(max) && sval_is_negative(sval_cast(type, max))) {
		if (!sval_is_negative(sval_cast(type, min))) {
			/* non-wrapping part first, wrapped part from type min */
			add_range(rl, sval_cast(type, min), sval_type_max(type));
			min = sval_type_min(type);
			/* (elided else) whole span lands in the negatives */
			min = sval_cast(type, min);
		max = sval_cast(type, max);
		add_range(rl, min, max);
/*
 * rl_truncate_cast() - cast @rl to @type by truncation: mask each
 * endpoint to type_bits(@type) bits when narrowing, and when the mask
 * inverts the order (sign flip) re-cast the pair before adding.
 * Results accumulate through add_range_t(); the declarations of
 * min/max and the return are elided from this excerpt.
 */
struct range_list *rl_truncate_cast(struct symbol *type, struct range_list *rl)
	struct data_range *tmp;
	struct range_list *ret = NULL;

	/* nothing to do without a type change */
	if (!type || type == rl_type(rl))

	FOR_EACH_PTR(rl, tmp) {
		if (type_bits(type) < type_bits(rl_type(rl))) {
			min.uvalue = tmp->min.uvalue & ((1ULL << type_bits(type)) - 1);
			max.uvalue = tmp->max.uvalue & ((1ULL << type_bits(type)) - 1);
		if (sval_cmp(min, max) > 0) {
			min = sval_cast(type, min);
			max = sval_cast(type, max);
		add_range_t(type, &ret, min, max);
	} END_FOR_EACH_PTR(tmp);

/*
 * rl_is_sane() - every range fits its type and has min <= max.
 * The assignment of @type (presumably rl_type(rl)) and the 0/1
 * returns are elided.
 */
static int rl_is_sane(struct range_list *rl)
	struct data_range *tmp;
	struct symbol *type;

	FOR_EACH_PTR(rl, tmp) {
		if (!sval_fits(type, tmp->min))
		if (!sval_fits(type, tmp->max))
		if (sval_cmp(tmp->min, tmp->max) > 0)
	} END_FOR_EACH_PTR(tmp);

/*
 * rl_type_consistent() - true when every endpoint in @rl carries the
 * same type (the comparison type; its assignment is elided).
 */
static int rl_type_consistent(struct range_list *rl)
	struct data_range *tmp;
	struct symbol *type;

	FOR_EACH_PTR(rl, tmp) {
		if (type != tmp->min.type || type != tmp->max.type)
	} END_FOR_EACH_PTR(tmp);
/*
 * cast_rl() - convert @rl to @type.  Insane input (rl_is_sane() fails)
 * collapses to the whole type range; a list already of @type with
 * consistent endpoint types is returned unchanged (return elided);
 * otherwise every range is re-added through add_range_t(), which
 * handles the sign/width wrap-around.  The trailing alloc_whole_rl()
 * is the fallback when the rebuilt list comes out empty (its guard is
 * elided).
 */
struct range_list *cast_rl(struct symbol *type, struct range_list *rl)
	struct data_range *tmp;
	struct range_list *ret = NULL;

	if (!rl_is_sane(rl))
		return alloc_whole_rl(type);
	if (type == rl_type(rl) && rl_type_consistent(rl))

	FOR_EACH_PTR(rl, tmp) {
		add_range_t(type, &ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	return alloc_whole_rl(type);

/*
 * rl_invert() - complement of @orig within its own type: walk the
 * sorted ranges emitting the gap before each one, then the tail gap up
 * to the type max.  gap_min tracks the first value not yet covered;
 * the +1/-1 stepping relies on ranges being sorted and non-adjacent
 * (add_range() merges adjacent ones).
 */
struct range_list *rl_invert(struct range_list *orig)
	struct range_list *ret = NULL;
	struct data_range *tmp;
	sval_t gap_min, abs_max, sval;

	gap_min = sval_type_min(rl_min(orig).type);
	abs_max = sval_type_max(rl_max(orig).type);

	FOR_EACH_PTR(orig, tmp) {
		if (sval_cmp(tmp->min, gap_min) > 0) {
			/* gap ends one below this range's min */
			sval = sval_type_val(tmp->min.type, tmp->min.value - 1);
			add_range(&ret, gap_min, sval);
		/* next gap starts one above this range's max */
		gap_min = sval_type_val(tmp->max.type, tmp->max.value + 1);
		/* reached the type max: gap_min above would have wrapped */
		if (sval_cmp(tmp->max, abs_max) == 0)
	} END_FOR_EACH_PTR(tmp);

	if (sval_cmp(gap_min, abs_max) < 0)
		add_range(&ret, gap_min, abs_max);
/*
 * rl_filter() - remove every range in @filter from @rl (the return of
 * rl is elided from this excerpt).
 */
struct range_list *rl_filter(struct range_list *rl, struct range_list *filter)
	struct data_range *tmp;

	FOR_EACH_PTR(filter, tmp) {
		rl = remove_range(rl, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

/*
 * rl_intersection() - one AND two, computed as one minus the
 * complement of two.
 */
struct range_list *rl_intersection(struct range_list *one, struct range_list *two)
	two = rl_invert(two);
	return rl_filter(one, two);

/*
 * free_rl() - free the ptr_list backbone and NULL the caller's pointer;
 * the data_range entries themselves live in the allocator pools above.
 */
void free_rl(struct range_list **rlist)
	__free_ptr_list((struct ptr_list **)rlist);
/* free_single_dinfo() - release the range list owned by one data_info. */
static void free_single_dinfo(struct data_info *dinfo)
	free_rl(&dinfo->value_ranges);

/*
 * free_dinfos() - walk one allocation blob freeing the value_ranges of
 * every data_info embedded in it.  The advance of @offset (presumably
 * by the aligned object size) is elided from this excerpt.
 */
static void free_dinfos(struct allocation_blob *blob)
	unsigned int size = sizeof(struct data_info);
	unsigned int offset = 0;

	while (offset < blob->offset) {
		free_single_dinfo((struct data_info *)(blob->data + offset));

/*
 * free_data_info_allocs() - reset the data_info allocator between
 * passes: free each dinfo's ranges, zero the bookkeeping counters,
 * release every blob (walking the ->next chain), then clear the
 * non-permanent data_range pool.  The loop wrapper around the
 * next/blob_free lines is elided.
 */
void free_data_info_allocs(void)
	struct allocator_struct *desc = &data_info_allocator;
	struct allocation_blob *blob = desc->blobs;

	desc->allocations = 0;
	desc->total_bytes = 0;
	desc->useful_bytes = 0;
	desc->freelist = NULL;
		struct allocation_blob *next = blob->next;
		blob_free(blob, desc->chunking);
	clear_data_range_alloc();