/*
 * Copyright (C) 2009 Dan Carpenter.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
 */
#include "smatch_extra.h"
#include "smatch_slist.h"
ALLOCATOR(data_info, "smatch extra data");
ALLOCATOR(data_range, "data range");
__DO_ALLOCATOR(struct data_range, sizeof(struct data_range), __alignof__(struct data_range),
	       "permanent ranges", perm_data_range);
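
/*
 * show_rl() - print a range_list as a human readable string.  A range that
 * holds a single value prints as one number, a wider range prints as
 * "min-max", and multiple ranges are comma separated.
 */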
char *show_rl(struct range_list *list)
{
	struct data_range *tmp;
	char full[256];
	int i = 0;

	full[0] = '\0';
	FOR_EACH_PTR(list, tmp) {
		if (i++)
			strncat(full, ",", 254 - strlen(full));
		if (sval_cmp(tmp->min, tmp->max) == 0) {
			strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
			continue;
		}
		strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
		strncat(full, "-", 254 - strlen(full));
		strncat(full, sval_to_str(tmp->max), 254 - strlen(full));
	} END_FOR_EACH_PTR(tmp);
	return alloc_sname(full);
}
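
/*
 * str_to_comparison_arg_helper() - parse a comparison annotation such as
 * "[<=$1]" out of a range string: record the comparison operator in
 * *comparison and look up the referenced argument of the call.
 */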
static int str_to_comparison_arg_helper(const char *str,
		struct expression *call, int *comparison,
		struct expression **arg, char **endp)
{
	int param;
	char *c = (char *)str;

	if (*c != '[')
		return 0;
	c++;

	if (*c == '<') {
		c++;
		if (*c == '=') {
			c++;
			*comparison = SPECIAL_LTE;
		} else {
			*comparison = '<';
		}
	} else if (*c == '=') {
		c += 2;
		*comparison = SPECIAL_EQUAL;
	} else if (*c == '>') {
		c++;
		if (*c == '=') {
			c++;
			*comparison = SPECIAL_GTE;
		} else {
			*comparison = '>';
		}
	} else if (*c == '!') {
		c += 2;
		*comparison = SPECIAL_NOTEQUAL;
	} else {
		return 0;
	}

	if (*c != '$')
		return 0;
	c++;

	param = strtoll(c, &c, 10);
	c++; /* skip the ']' character */
	*arg = get_argument_from_call_expr(call->args, param);
	if (!*arg)
		return 0;
	if (endp)
		*endp = (char *)c;
	return 1;
}
int str_to_comparison_arg(const char *str, struct expression *call, int *comparison, struct expression **arg)
{
	return str_to_comparison_arg_helper(str, call, comparison, arg, NULL);
}
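
/*
 * get_val_from_key() - given a comparison key such as "[<=$1]", return the
 * implied max (use_max = 1) or min (use_max = 0) of the referenced call
 * argument, clamped to the type.  Strict comparisons ('<'/'>') are adjusted
 * by one.
 */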
static int get_val_from_key(int use_max, struct symbol *type, char *c, struct expression *call, char **endp, sval_t *sval)
{
	struct expression *arg;
	int comparison;
	sval_t ret, tmp;

	if (use_max)
		ret = sval_type_max(type);
	else
		ret = sval_type_min(type);

	if (!str_to_comparison_arg_helper(c, call, &comparison, &arg, endp)) {
		*sval = ret;
		return 1;
	}

	if (use_max && get_implied_max(arg, &tmp)) {
		ret = tmp;
		if (comparison == '<') {
			tmp.value = 1;
			ret = sval_binop(ret, '-', tmp);
		}
	}
	if (!use_max && get_implied_min(arg, &tmp)) {
		ret = tmp;
		if (comparison == '>') {
			tmp.value = 1;
			ret = sval_binop(ret, '+', tmp);
		}
	}

	*sval = ret;
	return 1;
}
static sval_t add_one(sval_t sval)
{
	sval.value++;
	return sval;
}
static sval_t sub_one(sval_t sval)
{
	sval.value--;
	return sval;
}
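
/*
 * filter_by_comparison() - narrow *rl down to the values which can satisfy
 * "left <comparison> right".  Both sides are first cast to a common type
 * (at least int) so the endpoints compare sanely.
 */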
void filter_by_comparison(struct range_list **rl, int comparison, struct range_list *right)
{
	struct range_list *left_orig = *rl;
	struct range_list *right_orig = right;
	struct range_list *ret_rl = *rl;
	struct symbol *cast_type;
	sval_t min, max;

	cast_type = rl_type(left_orig);
	if (sval_type_max(rl_type(left_orig)).uvalue < sval_type_max(rl_type(right_orig)).uvalue)
		cast_type = rl_type(right_orig);
	if (sval_type_max(cast_type).uvalue < INT_MAX)
		cast_type = &int_ctype;

	min = sval_type_min(cast_type);
	max = sval_type_max(cast_type);
	left_orig = cast_rl(cast_type, left_orig);
	right_orig = cast_rl(cast_type, right_orig);

	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		ret_rl = remove_range(left_orig, rl_max(right_orig), max);
		break;
	case SPECIAL_LTE:
	case SPECIAL_UNSIGNED_LTE:
		if (!sval_is_max(rl_max(right_orig)))
			ret_rl = remove_range(left_orig, add_one(rl_max(right_orig)), max);
		break;
	case SPECIAL_EQUAL:
		if (!sval_is_max(rl_max(right_orig)))
			ret_rl = remove_range(ret_rl, add_one(rl_max(right_orig)), max);
		if (!sval_is_min(rl_min(right_orig)))
			ret_rl = remove_range(ret_rl, min, sub_one(rl_min(right_orig)));
		break;
	case SPECIAL_GTE:
	case SPECIAL_UNSIGNED_GTE:
		if (!sval_is_min(rl_min(right_orig)))
			ret_rl = remove_range(left_orig, min, sub_one(rl_min(right_orig)));
		break;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		ret_rl = remove_range(left_orig, min, rl_min(right_orig));
		break;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(rl_min(right_orig), rl_max(right_orig)) == 0)
			ret_rl = remove_range(left_orig, rl_min(right_orig), rl_min(right_orig));
		break;
	default:
		sm_msg("internal error: unhandled comparison %s", show_special(comparison));
		return;
	}

	*rl = cast_rl(rl_type(*rl), ret_rl);
}
static struct range_list *filter_by_comparison_call(char *c, struct expression *call, char **endp, struct range_list *start_rl)
{
	struct expression *arg;
	struct range_list *right_orig;
	int comparison;

	if (!str_to_comparison_arg_helper(c, call, &comparison, &arg, endp))
		return start_rl;

	if (!get_implied_rl(arg, &right_orig))
		return start_rl;

	filter_by_comparison(&start_rl, comparison, right_orig);
	return start_rl;
}
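
/*
 * parse_val() - parse one endpoint of a range string.  It accepts symbolic
 * limits ("max", "s32min", "long_max", "ptr_max", ...), comparison keys
 * like "[==$0]", and plain decimal numbers.
 */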
static sval_t parse_val(int use_max, struct expression *call, struct symbol *type, char *c, char **endp)
{
	char *start = c;
	sval_t ret;

	if (!strncmp(start, "max", 3)) {
		ret = sval_type_max(type);
		c += 3;
	} else if (!strncmp(start, "u64max", 6)) {
		ret = sval_type_val(type, ULLONG_MAX);
		c += 6;
	} else if (!strncmp(start, "s64max", 6)) {
		ret = sval_type_val(type, LLONG_MAX);
		c += 6;
	} else if (!strncmp(start, "u32max", 6)) {
		ret = sval_type_val(type, UINT_MAX);
		c += 6;
	} else if (!strncmp(start, "s32max", 6)) {
		ret = sval_type_val(type, INT_MAX);
		c += 6;
	} else if (!strncmp(start, "u16max", 6)) {
		ret = sval_type_val(type, USHRT_MAX);
		c += 6;
	} else if (!strncmp(start, "s16max", 6)) {
		ret = sval_type_val(type, SHRT_MAX);
		c += 6;
	} else if (!strncmp(start, "min", 3)) {
		ret = sval_type_min(type);
		c += 3;
	} else if (!strncmp(start, "s64min", 6)) {
		ret = sval_type_val(type, LLONG_MIN);
		c += 6;
	} else if (!strncmp(start, "s32min", 6)) {
		ret = sval_type_val(type, INT_MIN);
		c += 6;
	} else if (!strncmp(start, "s16min", 6)) {
		ret = sval_type_val(type, SHRT_MIN);
		c += 6;
	} else if (!strncmp(start, "long_min", 8)) {
		ret = sval_type_val(type, LONG_MIN);
		c += 8;
	} else if (!strncmp(start, "long_max", 8)) {
		ret = sval_type_val(type, LONG_MAX);
		c += 8;
	} else if (!strncmp(start, "ulong_max", 9)) {
		ret = sval_type_val(type, ULONG_MAX);
		c += 9;
	} else if (!strncmp(start, "ptr_max", 7)) {
		ret = sval_type_val(type, valid_ptr_max);
		c += 7;
	} else if (start[0] == '[') {
		/* this parses [==p0] comparisons */
		get_val_from_key(1, type, start, call, &c, &ret);
	} else {
		ret = sval_type_val(type, strtoll(start, &c, 10));
	}

	*endp = c;
	return ret;
}
static char *jump_to_call_math(char *value)
{
	char *c = value;

	while (*c && *c != '[')
		c++;

	if (!*c)
		return NULL;
	c++;
	if (*c == '<' || *c == '=' || *c == '>' || *c == '!')
		return NULL;

	return c;
}
static void str_to_rl_helper(struct expression *call, struct symbol *type, char *str, char **endp, struct range_list **rl)
{
	struct range_list *rl_tmp = NULL;
	char *c = str;
	sval_t min, max;

	min = sval_type_min(type);
	max = sval_type_max(type);

	while (*c != '\0' && *c != '[') {
		min = parse_val(0, call, type, c, &c);
		if (*c == '\0' || *c == '[') {
			add_range(&rl_tmp, min, min);
			break;
		}
		if (*c == ',') {
			add_range(&rl_tmp, min, min);
			c++;
			continue;
		}
		if (*c != '-') {
			sm_msg("debug XXX: trouble parsing %s c = %s", str, c);
			break;
		}
		c++;
		max = parse_val(1, call, type, c, &c);
		add_range(&rl_tmp, min, max);
		if (*c == ',')
			c++;
	}

	*rl = rl_tmp;
	*endp = c;
}
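
/*
 * str_to_dinfo() - turn a returned-value string such as "0-s32max[<=$1]"
 * into value ranges: parse the literal ranges, then intersect or filter
 * with any call math or comparison annotation.
 */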
static void str_to_dinfo(struct expression *call, struct symbol *type, char *value, struct data_info *dinfo)
{
	struct range_list *math_rl;
	char *call_math;
	char *c;
	struct range_list *rl = NULL;

	if (strcmp(value, "empty") == 0)
		return;

	if (strncmp(value, "[==$", 4) == 0) {
		struct expression *arg;
		int comparison;

		if (!str_to_comparison_arg(value, call, &comparison, &arg))
			return;
		if (!get_implied_rl(arg, &rl))
			return;
		goto cast;
	}

	str_to_rl_helper(call, type, value, &c, &rl);
	if (*c == '\0')
		goto cast;

	call_math = jump_to_call_math(value);
	if (call_math && parse_call_math_rl(call, call_math, &math_rl)) {
		rl = rl_intersection(rl, math_rl);
		goto cast;
	}

	/*
	 * For now if we already tried to handle the call math and couldn't
	 * figure it out then bail.
	 */
	if (jump_to_call_math(c) == c + 1)
		goto cast;

	rl = filter_by_comparison_call(c, call, &c, rl);

cast:
	rl = cast_rl(type, rl);
	dinfo->value_ranges = rl;
}
void str_to_rl(struct symbol *type, char *value, struct range_list **rl)
{
	struct data_info dinfo = {};

	str_to_dinfo(NULL, type, value, &dinfo);
	*rl = dinfo.value_ranges;
}
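
/*
 * Usage example (a sketch, not part of the original file): parsing a range
 * string for an int type.
 *
 *	struct range_list *rl = NULL;
 *
 *	str_to_rl(&int_ctype, (char *)"1-10,100", &rl);
 *	// rl now represents the set {1-10, 100}; show_rl(rl) prints "1-10,100"
 */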
void call_results_to_rl(struct expression *expr, struct symbol *type, char *value, struct range_list **rl)
{
	struct data_info dinfo = {};

	str_to_dinfo(strip_expr(expr), type, value, &dinfo);
	*rl = dinfo.value_ranges;
}
int is_whole_rl(struct range_list *rl)
{
	struct data_range *drange;

	if (ptr_list_empty(rl))
		return 0;
	drange = first_ptr_list((struct ptr_list *)rl);
	if (sval_is_min(drange->min) && sval_is_max(drange->max))
		return 1;
	return 0;
}
sval_t rl_min(struct range_list *rl)
{
	struct data_range *drange;
	sval_t ret;

	ret.type = &llong_ctype;
	ret.value = LLONG_MIN;
	if (ptr_list_empty(rl))
		return ret;
	drange = first_ptr_list((struct ptr_list *)rl);
	return drange->min;
}
sval_t rl_max(struct range_list *rl)
{
	struct data_range *drange;
	sval_t ret;

	ret.type = &llong_ctype;
	ret.value = LLONG_MAX;
	if (ptr_list_empty(rl))
		return ret;
	drange = last_ptr_list((struct ptr_list *)rl);
	return drange->max;
}
int rl_to_sval(struct range_list *rl, sval_t *sval)
{
	sval_t min, max;

	if (!rl)
		return 0;

	min = rl_min(rl);
	max = rl_max(rl);
	if (sval_cmp(min, max) != 0)
		return 0;
	*sval = min;
	return 1;
}
struct symbol *rl_type(struct range_list *rl)
{
	if (!rl)
		return NULL;
	return rl_min(rl).type;
}
static struct data_range *alloc_range_helper_sval(sval_t min, sval_t max, int perm)
{
	struct data_range *ret;

	if (perm)
		ret = __alloc_perm_data_range(0);
	else
		ret = __alloc_data_range(0);
	ret->min = min;
	ret->max = max;
	return ret;
}
struct data_range *alloc_range(sval_t min, sval_t max)
{
	return alloc_range_helper_sval(min, max, 0);
}
struct data_range *alloc_range_perm(sval_t min, sval_t max)
{
	return alloc_range_helper_sval(min, max, 1);
}
struct range_list *alloc_rl(sval_t min, sval_t max)
{
	struct range_list *rl = NULL;

	if (sval_cmp(min, max) > 0)
		return alloc_whole_rl(min.type);

	add_range(&rl, min, max);
	return rl;
}
struct range_list *alloc_whole_rl(struct symbol *type)
{
	if (!type || type_positive_bits(type) < 0)
		type = &llong_ctype;
	if (type->type == SYM_ARRAY)
		type = &ptr_ctype;

	return alloc_rl(sval_type_min(type), sval_type_max(type));
}
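
/*
 * add_range() - insert [min, max] into a sorted range_list, merging it with
 * any ranges it touches or overlaps.  The list stays sorted and
 * non-overlapping.
 */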
void add_range(struct range_list **list, sval_t min, sval_t max)
{
	struct data_range *tmp = NULL;
	struct data_range *new = NULL;
	int check_next = 0;

	/*
	 * FIXME: This has a problem merging a range_list like: min-0,3-max
	 * with a range like 1-2.  You end up with min-2,3-max instead of
	 * min-max.
	 */
	FOR_EACH_PTR(*list, tmp) {
		if (check_next) {
			/* Sometimes we overlap with more than one range
			   so we have to delete or modify the next range. */
			if (max.value + 1 == tmp->min.value) {
				/* join 2 ranges here */
				new->max = tmp->max;
				DELETE_CURRENT_PTR(tmp);
				return;
			}

			/* Doesn't overlap with the next one. */
			if (sval_cmp(max, tmp->min) < 0)
				return;

			/* Partially overlaps with the next one. */
			if (sval_cmp(max, tmp->max) < 0) {
				tmp->min.value = max.value + 1;
				return;
			}

			/* Completely overlaps with the next one. */
			if (sval_cmp(max, tmp->max) >= 0) {
				DELETE_CURRENT_PTR(tmp);
				/* there could be more ranges to delete */
				continue;
			}
		}
		if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(min, tmp->max);
			REPLACE_CURRENT_PTR(tmp, new);
			return;
		}
		if (sval_cmp(max, tmp->min) < 0) { /* new range entirely below */
			new = alloc_range(min, max);
			INSERT_CURRENT(new, tmp);
			return;
		}
		if (sval_cmp(min, tmp->min) < 0) { /* new range partially below */
			if (sval_cmp(max, tmp->max) < 0)
				max = tmp->max;
			else
				check_next = 1;
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			if (!check_next)
				return;
			continue;
		}
		if (sval_cmp(max, tmp->max) <= 0) /* new range already included */
			return;
		if (sval_cmp(min, tmp->max) <= 0) { /* new range partially above */
			min = tmp->min;
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			check_next = 1;
			continue;
		}
		if (!sval_is_min(min) && min.value - 1 == tmp->max.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(tmp->min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			check_next = 1;
			continue;
		}
		/* the new range is entirely above the existing ranges */
	} END_FOR_EACH_PTR(tmp);

	if (check_next)
		return;
	new = alloc_range(min, max);
	add_ptr_list(list, new);
}
struct range_list *clone_rl(struct range_list *list)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		add_ptr_list(&ret, tmp);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}
struct range_list *clone_rl_permanent(struct range_list *list)
{
	struct data_range *tmp;
	struct data_range *new;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		new = alloc_range_perm(tmp->min, tmp->max);
		add_ptr_list(&ret, new);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}
struct range_list *rl_union(struct range_list *one, struct range_list *two)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(one, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
	FOR_EACH_PTR(two, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}
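
/*
 * remove_range() - return a copy of the list with the values [min, max]
 * cut out, splitting a range in two when the hole falls in the middle.
 */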
struct range_list *remove_range(struct range_list *list, sval_t min, sval_t max)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		if (sval_cmp(tmp->max, min) < 0) {
			add_range(&ret, tmp->min, tmp->max);
			continue;
		}
		if (sval_cmp(tmp->min, max) > 0) {
			add_range(&ret, tmp->min, tmp->max);
			continue;
		}
		if (sval_cmp(tmp->min, min) >= 0 && sval_cmp(tmp->max, max) <= 0)
			continue;
		if (sval_cmp(tmp->min, min) >= 0) {
			max.value++;
			add_range(&ret, max, tmp->max);
		} else if (sval_cmp(tmp->max, max) <= 0) {
			min.value--;
			add_range(&ret, tmp->min, min);
		} else {
			min.value--;
			max.value++;
			add_range(&ret, tmp->min, min);
			add_range(&ret, max, tmp->max);
		}
	} END_FOR_EACH_PTR(tmp);
	return ret;
}
int ranges_equiv(struct data_range *one, struct data_range *two)
{
	if (!one && !two)
		return 1;
	if (!one || !two)
		return 0;
	if (sval_cmp(one->min, two->min) != 0)
		return 0;
	if (sval_cmp(one->max, two->max) != 0)
		return 0;
	return 1;
}
int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		if (!one_range && !two_range)
			return 1;
		if (!ranges_equiv(one_range, two_range))
			break;
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);

	return 0;
}
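
/*
 * true_comparison_range() - can "left <comparison> right" be true for at
 * least one pair of values drawn from the two ranges?
 */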
int true_comparison_range(struct data_range *left, int comparison, struct data_range *right)
{
	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->min, right->max) < 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_LTE:
	case SPECIAL_LTE:
		if (sval_cmp(left->min, right->max) <= 0)
			return 1;
		return 0;
	case SPECIAL_EQUAL:
		if (sval_cmp(left->max, right->min) < 0)
			return 0;
		if (sval_cmp(left->min, right->max) > 0)
			return 0;
		return 1;
	case SPECIAL_UNSIGNED_GTE:
	case SPECIAL_GTE:
		if (sval_cmp(left->max, right->min) >= 0)
			return 1;
		return 0;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->max, right->min) > 0)
			return 1;
		return 0;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(left->min, left->max) != 0)
			return 1;
		if (sval_cmp(right->min, right->max) != 0)
			return 1;
		if (sval_cmp(left->min, right->min) != 0)
			return 1;
		return 0;
	default:
		sm_msg("unhandled comparison %d\n", comparison);
		return 0;
	}
	return 0;
}
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}
static int false_comparison_range_sval(struct data_range *left, int comparison, struct data_range *right)
{
	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->max, right->min) >= 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_LTE:
	case SPECIAL_LTE:
		if (sval_cmp(left->max, right->min) > 0)
			return 1;
		return 0;
	case SPECIAL_EQUAL:
		if (sval_cmp(left->min, left->max) != 0)
			return 1;
		if (sval_cmp(right->min, right->max) != 0)
			return 1;
		if (sval_cmp(left->min, right->min) != 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_GTE:
	case SPECIAL_GTE:
		if (sval_cmp(left->min, right->max) < 0)
			return 1;
		return 0;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->min, right->max) <= 0)
			return 1;
		return 0;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(left->max, right->min) < 0)
			return 0;
		if (sval_cmp(left->min, right->max) > 0)
			return 0;
		return 1;
	default:
		sm_msg("unhandled comparison %d\n", comparison);
		return 0;
	}
	return 0;
}
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}
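
/*
 * possibly_true()/possibly_false() - can the comparison between two
 * expressions be true (or false) for some pair of implied values?  Both
 * sides are cast to a common type of at least int before comparing.
 */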
int possibly_true(struct expression *left, int comparison, struct expression *right)
{
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;
	struct symbol *type;

	if (!get_implied_rl(left, &rl_left))
		return 1;
	if (!get_implied_rl(right, &rl_right))
		return 1;

	type = rl_type(rl_left);
	if (type_positive_bits(type) < type_positive_bits(rl_type(rl_right)))
		type = rl_type(rl_right);
	if (type_positive_bits(type) < 31)
		type = &int_ctype;

	rl_left = cast_rl(type, rl_left);
	rl_right = cast_rl(type, rl_right);

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (true_comparison_range(tmp_left, comparison, tmp_right))
				return 1;
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
	return 0;
}
int possibly_false(struct expression *left, int comparison, struct expression *right)
{
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;
	struct symbol *type;

	if (!get_implied_rl(left, &rl_left))
		return 1;
	if (!get_implied_rl(right, &rl_right))
		return 1;

	type = rl_type(rl_left);
	if (type_positive_bits(type) < type_positive_bits(rl_type(rl_right)))
		type = rl_type(rl_right);
	if (type_positive_bits(type) < 31)
		type = &int_ctype;

	rl_left = cast_rl(type, rl_left);
	rl_right = cast_rl(type, rl_right);

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (false_comparison_range_sval(tmp_left, comparison, tmp_right))
				return 1;
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
	return 0;
}
int possibly_true_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
	struct data_range *left_tmp, *right_tmp;

	if (!left_ranges || !right_ranges)
		return 1;

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (true_comparison_range(left_tmp, comparison, right_tmp))
				return 1;
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
	return 0;
}
int possibly_false_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
	struct data_range *left_tmp, *right_tmp;

	if (!left_ranges || !right_ranges)
		return 1;

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (false_comparison_range_sval(left_tmp, comparison, right_tmp))
				return 1;
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
	return 0;
}
/* FIXME: the _rl here stands for right left so really it should be _lr */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}
int rl_has_sval(struct range_list *rl, sval_t sval)
{
	struct data_range *tmp;

	FOR_EACH_PTR(rl, tmp) {
		if (sval_cmp(tmp->min, sval) <= 0 &&
		    sval_cmp(tmp->max, sval) >= 0)
			return 1;
	} END_FOR_EACH_PTR(tmp);
	return 0;
}
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}
void filter_top_rl(struct range_list_stack **rl_stack, sval_t sval)
{
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = remove_range(rl, sval, sval);
	push_rl(rl_stack, rl);
}
static int sval_too_big(struct symbol *type, sval_t sval)
{
	if (type_bits(type) == 64)
		return 0;
	if (sval.uvalue > ((1ULL << type_bits(type)) - 1))
		return 1;
	return 0;
}
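
/*
 * add_range_t() - add [min, max] to *rl with the endpoints cast to "type",
 * splitting the range when the cast would wrap (negative values cast to an
 * unsigned type, or large unsigned values cast to a signed one).
 */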
static void add_range_t(struct symbol *type, struct range_list **rl, sval_t min, sval_t max)
{
	/* If we're just adding a number, cast it and add it */
	if (sval_cmp(min, max) == 0) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));
		return;
	}

	/* If the range is within the type range then add it */
	if (sval_fits(type, min) && sval_fits(type, max)) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));
		return;
	}

	/*
	 * If the range we are adding has more bits than the range type then
	 * add the whole range type.  Eg:
	 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
	 * This isn't totally the right thing to do.  We could be more granular.
	 */
	if (sval_too_big(type, min) || sval_too_big(type, max)) {
		add_range(rl, sval_type_min(type), sval_type_max(type));
		return;
	}

	/* Cast negative values to high positive values */
	if (sval_is_negative(min) && type_unsigned(type)) {
		if (sval_is_positive(max)) {
			if (sval_too_high(type, max)) {
				add_range(rl, sval_type_min(type), sval_type_max(type));
				return;
			}
			add_range(rl, sval_type_val(type, 0), sval_cast(type, max));
			max = sval_type_max(type);
		} else {
			max = sval_cast(type, max);
		}
		min = sval_cast(type, min);
		add_range(rl, min, max);
		return;
	}

	/* Cast high positive numbers to negative */
	if (sval_unsigned(max) && sval_is_negative(sval_cast(type, max))) {
		if (!sval_is_negative(sval_cast(type, min))) {
			add_range(rl, sval_cast(type, min), sval_type_max(type));
			min = sval_type_min(type);
		} else {
			min = sval_cast(type, min);
		}
		max = sval_cast(type, max);
		add_range(rl, min, max);
		return;
	}

	add_range(rl, min, max);
}
struct range_list *rl_truncate_cast(struct symbol *type, struct range_list *rl)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;
	sval_t min, max;

	if (!type || type == rl_type(rl))
		return rl;

	FOR_EACH_PTR(rl, tmp) {
		min = tmp->min;
		max = tmp->max;
		if (type_bits(type) < type_bits(rl_type(rl))) {
			min.uvalue = tmp->min.uvalue & ((1ULL << type_bits(type)) - 1);
			max.uvalue = tmp->max.uvalue & ((1ULL << type_bits(type)) - 1);
		}
		if (sval_cmp(min, max) > 0) {
			min = sval_cast(type, min);
			max = sval_cast(type, max);
		}
		add_range_t(type, &ret, min, max);
	} END_FOR_EACH_PTR(tmp);

	return ret;
}
static int rl_is_sane(struct range_list *rl)
{
	struct data_range *tmp;
	struct symbol *type;

	type = rl_type(rl);
	FOR_EACH_PTR(rl, tmp) {
		if (!sval_fits(type, tmp->min))
			return 0;
		if (!sval_fits(type, tmp->max))
			return 0;
		if (sval_cmp(tmp->min, tmp->max) > 0)
			return 0;
	} END_FOR_EACH_PTR(tmp);

	return 1;
}
static int rl_type_consistent(struct range_list *rl)
{
	struct data_range *tmp;
	struct symbol *type;

	type = rl_type(rl);
	FOR_EACH_PTR(rl, tmp) {
		if (type != tmp->min.type || type != tmp->max.type)
			return 0;
	} END_FOR_EACH_PTR(tmp);
	return 1;
}
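
/*
 * cast_rl() - cast every range in the list to "type".  Insane input
 * collapses to the whole range of the target type; input that is already
 * the right type is cloned as-is.
 */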
struct range_list *cast_rl(struct symbol *type, struct range_list *rl)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	if (!rl)
		return NULL;
	if (!type)
		return rl;
	if (!rl_is_sane(rl))
		return alloc_whole_rl(type);
	if (type == rl_type(rl) && rl_type_consistent(rl))
		return clone_rl(rl);

	FOR_EACH_PTR(rl, tmp) {
		add_range_t(type, &ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	if (!ret)
		return alloc_whole_rl(type);

	return ret;
}
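
/*
 * rl_invert() - the complement of a range_list within its type: every gap
 * between the ranges (plus anything below the first range and above the
 * last one) becomes a range in the result.
 */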
struct range_list *rl_invert(struct range_list *orig)
{
	struct range_list *ret = NULL;
	struct data_range *tmp;
	sval_t gap_min, abs_max, sval;

	if (!orig)
		return NULL;

	gap_min = sval_type_min(rl_min(orig).type);
	abs_max = sval_type_max(rl_max(orig).type);

	FOR_EACH_PTR(orig, tmp) {
		if (sval_cmp(tmp->min, gap_min) > 0) {
			sval = sval_type_val(tmp->min.type, tmp->min.value - 1);
			add_range(&ret, gap_min, sval);
		}
		gap_min = sval_type_val(tmp->max.type, tmp->max.value + 1);
		if (sval_cmp(tmp->max, abs_max) == 0)
			gap_min = abs_max;
	} END_FOR_EACH_PTR(tmp);

	if (sval_cmp(gap_min, abs_max) < 0)
		add_range(&ret, gap_min, abs_max);

	return ret;
}
struct range_list *rl_filter(struct range_list *rl, struct range_list *filter)
{
	struct data_range *tmp;

	FOR_EACH_PTR(filter, tmp) {
		rl = remove_range(rl, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	return rl;
}
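
/*
 * rl_intersection() - the values present in both lists.  Implemented by
 * inverting each list and filtering the inverses out, first in the larger
 * type and then again in the smaller one.
 */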
struct range_list *rl_intersection(struct range_list *one, struct range_list *two)
{
	struct range_list *one_orig;
	struct range_list *two_orig;
	struct range_list *ret;
	struct symbol *ret_type;
	struct symbol *small_type;
	struct symbol *large_type;

	if (!one || !two)
		return NULL;

	one_orig = one;
	two_orig = two;

	ret_type = rl_type(one);
	small_type = rl_type(one);
	large_type = rl_type(two);

	if (type_bits(rl_type(two)) < type_bits(small_type)) {
		small_type = rl_type(two);
		large_type = rl_type(one);
	}

	one = cast_rl(large_type, one);
	two = cast_rl(large_type, two);

	ret = alloc_whole_rl(large_type);
	one = rl_invert(one);
	two = rl_invert(two);

	ret = rl_filter(ret, one);
	ret = rl_filter(ret, two);

	one = cast_rl(small_type, one_orig);
	two = cast_rl(small_type, two_orig);

	one = rl_invert(one);
	two = rl_invert(two);

	ret = cast_rl(small_type, ret);
	ret = rl_filter(ret, one);
	ret = rl_filter(ret, two);

	return cast_rl(ret_type, ret);
}
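
/*
 * The handle_*_rl() helpers below are used by rl_binop(): each computes a
 * (possibly over-approximated) result range for one binary operation, or
 * returns NULL when it cannot say anything useful.
 */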
static struct range_list *handle_mod_rl(struct range_list *left, struct range_list *right)
{
	sval_t zero;
	sval_t max;

	max = rl_max(right);
	if (sval_is_max(max))
		return left;
	if (max.value == 0)
		return NULL;
	max.value--;
	if (sval_is_negative(max))
		return NULL;
	if (sval_cmp(rl_max(left), max) < 0)
		return left;

	zero = max;
	zero.value = 0;
	return alloc_rl(zero, max);
}
static struct range_list *handle_divide_rl(struct range_list *left, struct range_list *right)
{
	sval_t min, max;

	if (sval_is_max(rl_max(left)))
		return NULL;
	if (sval_is_max(rl_max(right)))
		return NULL;

	if (sval_is_negative(rl_min(left)))
		return NULL;
	if (sval_cmp_val(rl_min(right), 0) <= 0)
		return NULL;

	max = sval_binop(rl_max(left), '/', rl_min(right));
	min = sval_binop(rl_min(left), '/', rl_max(right));

	return alloc_rl(min, max);
}
static struct range_list *handle_add_mult_rl(struct range_list *left, int op, struct range_list *right)
{
	sval_t min, max;

	if (sval_binop_overflows(rl_min(left), op, rl_min(right)))
		return NULL;
	min = sval_binop(rl_min(left), op, rl_min(right));

	if (sval_binop_overflows(rl_max(left), op, rl_max(right)))
		return NULL;
	max = sval_binop(rl_max(left), op, rl_max(right));

	return alloc_rl(min, max);
}
static unsigned long long sval_fls_mask(sval_t sval)
{
	unsigned long long uvalue = sval.uvalue;
	unsigned long long high_bit = 0;

	while (uvalue) {
		uvalue >>= 1;
		high_bit++;
	}

	if (high_bit == 0)
		return 0;

	return ((unsigned long long)-1) >> (64 - high_bit);
}
static unsigned long long rl_bits_always_set(struct range_list *rl)
{
	return sval_fls_mask(rl_min(rl));
}
static unsigned long long rl_bits_maybe_set(struct range_list *rl)
{
	return sval_fls_mask(rl_max(rl));
}
static struct range_list *handle_OR_rl(struct range_list *left, struct range_list *right)
{
	unsigned long long left_min, left_max, right_min, right_max;
	sval_t min, max;
	sval_t sval;

	if ((rl_to_sval(left, &sval) || rl_to_sval(right, &sval)) &&
	    !sval_binop_overflows(rl_max(left), '+', rl_max(right)))
		return rl_binop(left, '+', right);

	left_min = rl_bits_always_set(left);
	left_max = rl_bits_maybe_set(left);
	right_min = rl_bits_always_set(right);
	right_max = rl_bits_maybe_set(right);

	min.type = max.type = &ullong_ctype;
	min.uvalue = left_min | right_min;
	max.uvalue = left_max | right_max;

	return cast_rl(rl_type(left), alloc_rl(min, max));
}
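
/*
 * rl_binop() - compute a range for "left op right".  Only a few operators
 * are handled precisely; anything unhandled (or overflowing) falls back to
 * the whole range of the common type.
 */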
struct range_list *rl_binop(struct range_list *left, int op, struct range_list *right)
{
	struct symbol *cast_type;
	sval_t left_sval, right_sval;
	struct range_list *ret = NULL;

	cast_type = rl_type(left);
	if (sval_type_max(rl_type(left)).uvalue < sval_type_max(rl_type(right)).uvalue)
		cast_type = rl_type(right);
	if (sval_type_max(cast_type).uvalue < INT_MAX)
		cast_type = &int_ctype;

	left = cast_rl(cast_type, left);
	right = cast_rl(cast_type, right);

	if (!left || !right)
		return alloc_whole_rl(cast_type);

	if (rl_to_sval(left, &left_sval) && rl_to_sval(right, &right_sval)) {
		sval_t val = sval_binop(left_sval, op, right_sval);
		return alloc_rl(val, val);
	}

	switch (op) {
	case '%':
		ret = handle_mod_rl(left, right);
		break;
	case '/':
		ret = handle_divide_rl(left, right);
		break;
	case '*':
	case '+':
		ret = handle_add_mult_rl(left, op, right);
		break;
	case '|':
		ret = handle_OR_rl(left, right);
		break;

	/* FIXME: Do the rest as well */
	case '-':
	case '&':
	case SPECIAL_RIGHTSHIFT:
	case SPECIAL_LEFTSHIFT:
	case '^':
		break;
	}

	if (!ret)
		ret = alloc_whole_rl(cast_type);

	return ret;
}
void free_rl(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}
static void free_single_dinfo(struct data_info *dinfo)
{
	free_rl(&dinfo->value_ranges);
}
static void free_dinfos(struct allocation_blob *blob)
{
	unsigned int size = sizeof(struct data_info);
	unsigned int offset = 0;

	while (offset < blob->offset) {
		free_single_dinfo((struct data_info *)(blob->data + offset));
		offset += size;
	}
}
void free_data_info_allocs(void)
{
	struct allocator_struct *desc = &data_info_allocator;
	struct allocation_blob *blob = desc->blobs;

	desc->blobs = NULL;
	desc->allocations = 0;
	desc->total_bytes = 0;
	desc->useful_bytes = 0;
	desc->freelist = NULL;
	while (blob) {
		struct allocation_blob *next = blob->next;

		free_dinfos(blob);
		blob_free(blob, desc->chunking);
		blob = next;
	}
	clear_data_range_alloc();
}