2 * Copyright (C) 2009 Dan Carpenter.
4 * This program is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU General Public License
6 * as published by the Free Software Foundation; either version 2
7 * of the License, or (at your option) any later version.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
20 #include "smatch_extra.h"
21 #include "smatch_slist.h"
23 ALLOCATOR(data_info
, "smatch extra data");
24 ALLOCATOR(data_range
, "data range");
25 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
26 "permanent ranges", perm_data_range
);
27 __DECLARE_ALLOCATOR(struct ptr_list
, rl_ptrlist
);
29 bool is_err_ptr(sval_t sval
)
31 if (option_project
!= PROJ_KERNEL
)
33 if (!type_is_ptr(sval
.type
))
35 if (sval
.uvalue
< -4095ULL)
40 static char *get_err_pointer_str(struct data_range
*drange
)
45 * The kernel has error pointers where you do essentially:
47 * return (void *)(unsigned long)-12;
49 * But what I want here is to print -12 instead of the unsigned version
53 if (!is_err_ptr(drange
->min
))
56 if (drange
->min
.value
== drange
->max
.value
)
57 snprintf(buf
, sizeof(buf
), "(%lld)", drange
->min
.value
);
59 snprintf(buf
, sizeof(buf
), "(%lld)-(%lld)", drange
->min
.value
, drange
->max
.value
);
63 char *show_rl(struct range_list
*list
)
65 struct data_range
*prev_drange
= NULL
;
66 struct data_range
*tmp
;
76 FOR_EACH_PTR(list
, tmp
) {
77 remain
= full
+ sizeof(full
) - p
;
79 snprintf(prev
, full
+ sizeof(full
) - prev
, ",%s-%s",
80 sval_to_str(prev_drange
->min
),
81 sval_to_str(sval_type_max(prev_drange
->min
.type
)));
87 err_ptr
= get_err_pointer_str(tmp
);
89 p
+= snprintf(p
, remain
, "%s%s", i
++ ? "," : "", err_ptr
);
90 } else if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
91 p
+= snprintf(p
, remain
, "%s%s", i
++ ? "," : "",
92 sval_to_str(tmp
->min
));
94 p
+= snprintf(p
, remain
, "%s%s-%s", i
++ ? "," : "",
95 sval_to_str(tmp
->min
),
96 sval_to_str(tmp
->max
));
98 } END_FOR_EACH_PTR(tmp
);
100 return alloc_sname(full
);
/* Free every range list allocated from the rl_ptrlist pool in one shot. */
void free_all_rl(void)
{
	clear_rl_ptrlist_alloc();
}
108 static int sval_too_big(struct symbol
*type
, sval_t sval
)
110 if (type_bits(type
) >= 32 &&
111 type_bits(sval
.type
) <= type_bits(type
))
113 if (sval
.uvalue
<= ((1ULL << type_bits(type
)) - 1))
115 if (type_signed(sval
.type
)) {
116 if (type_unsigned(type
)) {
117 unsigned long long neg
= ~sval
.uvalue
;
118 if (neg
<= sval_type_max(type
).uvalue
)
121 if (sval
.value
< sval_type_min(type
).value
)
123 if (sval
.value
> sval_type_max(type
).value
)
127 if (sval
.uvalue
> sval_type_max(type
).uvalue
)
132 static int truncates_nicely(struct symbol
*type
, sval_t min
, sval_t max
)
134 unsigned long long mask
;
135 int bits
= type_bits(type
);
137 if (type_is_fp(min
.type
) && !type_is_fp(type
))
140 if (bits
>= type_bits(min
.type
))
143 mask
= -1ULL << bits
;
144 return (min
.uvalue
& mask
) == (max
.uvalue
& mask
);
147 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
149 /* If we're just adding a number, cast it and add it */
150 if (sval_cmp(min
, max
) == 0) {
151 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
155 /* If the range is within the type range then add it */
156 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
157 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
161 if (truncates_nicely(type
, min
, max
)) {
162 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
167 * If the range we are adding has more bits than the range type then
168 * add the whole range type. Eg:
169 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
172 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
173 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
177 /* Cast negative values to high positive values */
178 if (sval_is_negative(min
) && type_unsigned(type
)) {
179 if (sval_is_positive(max
)) {
180 if (sval_too_high(type
, max
)) {
181 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
184 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
185 max
= sval_type_max(type
);
187 max
= sval_cast(type
, max
);
189 min
= sval_cast(type
, min
);
190 add_range(rl
, min
, max
);
193 /* Cast high positive numbers to negative */
194 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
195 if (!sval_is_negative(sval_cast(type
, min
))) {
196 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
197 min
= sval_type_min(type
);
199 min
= sval_cast(type
, min
);
201 max
= sval_cast(type
, max
);
202 add_range(rl
, min
, max
);
205 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
209 static int str_to_comparison_arg_helper(const char *str
,
210 struct expression
*call
, int *comparison
,
211 struct expression
**arg
, const char **endp
)
223 *comparison
= SPECIAL_LTE
;
228 } else if (*c
== '=') {
231 *comparison
= SPECIAL_EQUAL
;
232 } else if (*c
== '>') {
235 *comparison
= SPECIAL_GTE
;
240 } else if (*c
== '!') {
243 *comparison
= SPECIAL_NOTEQUAL
;
244 } else if (*c
== '$') {
245 *comparison
= SPECIAL_EQUAL
;
254 param
= strtoll(c
, (char **)&c
, 10);
256 * FIXME: handle parameter math. [==$1 + 100]
262 if (*c
== ',' || *c
== ']')
263 c
++; /* skip the ']' character */
269 *arg
= get_argument_from_call_expr(call
->args
, param
);
272 if (*c
== '-' && *(c
+ 1) == '>') {
276 n
= snprintf(buf
, sizeof(buf
), "$%s", c
);
277 if (n
>= sizeof(buf
))
279 if (buf
[n
- 1] == ']')
281 *arg
= gen_expression_from_key(*arg
, buf
);
282 while (*c
&& *c
!= ']')
288 int str_to_comparison_arg(const char *str
, struct expression
*call
, int *comparison
, struct expression
**arg
)
297 return str_to_comparison_arg_helper(str
, call
, comparison
, arg
, NULL
);
300 static int get_val_from_key(int use_max
, struct symbol
*type
, const char *c
, struct expression
*call
, const char **endp
, sval_t
*sval
)
302 struct expression
*arg
;
307 ret
= sval_type_max(type
);
309 ret
= sval_type_min(type
);
311 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
)) {
316 if (use_max
&& get_implied_max(arg
, &tmp
)) {
318 if (comparison
== '<') {
320 ret
= sval_binop(ret
, '-', tmp
);
323 if (!use_max
&& get_implied_min(arg
, &tmp
)) {
325 if (comparison
== '>') {
327 ret
= sval_binop(ret
, '+', tmp
);
335 static sval_t
add_one(sval_t sval
)
341 static sval_t
sub_one(sval_t sval
)
347 void filter_by_comparison(struct range_list
**rl
, int comparison
, struct range_list
*right
)
349 struct range_list
*left_orig
= *rl
;
350 struct range_list
*right_orig
= right
;
351 struct range_list
*ret_rl
= *rl
;
352 struct symbol
*cast_type
;
355 if (comparison
== UNKNOWN_COMPARISON
)
358 cast_type
= rl_type(left_orig
);
359 if (sval_type_max(rl_type(left_orig
)).uvalue
< sval_type_max(rl_type(right_orig
)).uvalue
)
360 cast_type
= rl_type(right_orig
);
361 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
362 cast_type
= &int_ctype
;
364 min
= sval_type_min(cast_type
);
365 max
= sval_type_max(cast_type
);
366 left_orig
= cast_rl(cast_type
, left_orig
);
367 right_orig
= cast_rl(cast_type
, right_orig
);
369 switch (comparison
) {
371 case SPECIAL_UNSIGNED_LT
:
372 ret_rl
= remove_range(left_orig
, rl_max(right_orig
), max
);
375 case SPECIAL_UNSIGNED_LTE
:
376 if (!sval_is_max(rl_max(right_orig
)))
377 ret_rl
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
380 ret_rl
= rl_intersection(left_orig
, right_orig
);
383 case SPECIAL_UNSIGNED_GTE
:
384 if (!sval_is_min(rl_min(right_orig
)))
385 ret_rl
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
388 case SPECIAL_UNSIGNED_GT
:
389 ret_rl
= remove_range(left_orig
, min
, rl_min(right_orig
));
391 case SPECIAL_NOTEQUAL
:
392 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
393 ret_rl
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
396 sm_perror("unhandled comparison %s", show_special(comparison
));
400 *rl
= cast_rl(rl_type(*rl
), ret_rl
);
403 static struct range_list
*filter_by_comparison_call(const char *c
, struct expression
*call
, const char **endp
, struct range_list
*start_rl
)
406 struct expression
*arg
;
407 struct range_list
*casted_start
, *right_orig
;
410 /* For when we have a function that takes a function pointer. */
411 if (!call
|| call
->type
!= EXPR_CALL
)
414 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
))
417 if (!get_implied_rl(arg
, &right_orig
))
421 if (type_positive_bits(rl_type(start_rl
)) > type_positive_bits(type
))
422 type
= rl_type(start_rl
);
423 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
424 type
= rl_type(right_orig
);
426 casted_start
= cast_rl(type
, start_rl
);
427 right_orig
= cast_rl(type
, right_orig
);
429 filter_by_comparison(&casted_start
, comparison
, right_orig
);
430 return cast_rl(rl_type(start_rl
), casted_start
);
433 static sval_t
parse_val(int use_max
, struct expression
*call
, struct symbol
*type
, const char *c
, const char **endp
)
435 const char *start
= c
;
438 if (type
== &float_ctype
)
439 return sval_type_fval(type
, strtof(start
, (char **)endp
));
440 else if (type
== &double_ctype
)
441 return sval_type_fval(type
, strtod(start
, (char **)endp
));
442 else if (type
== &ldouble_ctype
)
443 return sval_type_fval(type
, strtold(start
, (char **)endp
));
445 if (!strncmp(start
, "max", 3)) {
446 ret
= sval_type_max(type
);
448 } else if (!strncmp(start
, "u64max", 6)) {
449 ret
= sval_type_val(type
, ULLONG_MAX
);
451 } else if (!strncmp(start
, "s64max", 6)) {
452 ret
= sval_type_val(type
, LLONG_MAX
);
454 } else if (!strncmp(start
, "u32max", 6)) {
455 ret
= sval_type_val(type
, UINT_MAX
);
457 } else if (!strncmp(start
, "s32max", 6)) {
458 ret
= sval_type_val(type
, INT_MAX
);
460 } else if (!strncmp(start
, "u16max", 6)) {
461 ret
= sval_type_val(type
, USHRT_MAX
);
463 } else if (!strncmp(start
, "s16max", 6)) {
464 ret
= sval_type_val(type
, SHRT_MAX
);
466 } else if (!strncmp(start
, "min", 3)) {
467 ret
= sval_type_min(type
);
469 } else if (!strncmp(start
, "s64min", 6)) {
470 ret
= sval_type_val(type
, LLONG_MIN
);
472 } else if (!strncmp(start
, "s32min", 6)) {
473 ret
= sval_type_val(type
, INT_MIN
);
475 } else if (!strncmp(start
, "s16min", 6)) {
476 ret
= sval_type_val(type
, SHRT_MIN
);
478 } else if (!strncmp(start
, "long_min", 8)) {
479 ret
= sval_type_val(type
, LONG_MIN
);
481 } else if (!strncmp(start
, "long_max", 8)) {
482 ret
= sval_type_val(type
, LONG_MAX
);
484 } else if (!strncmp(start
, "ulong_max", 9)) {
485 ret
= sval_type_val(type
, ULONG_MAX
);
487 } else if (!strncmp(start
, "ptr_max", 7)) {
488 ret
= sval_type_val(type
, valid_ptr_max
);
490 } else if (start
[0] == '[') {
491 /* this parses [==p0] comparisons */
492 get_val_from_key(1, type
, start
, call
, &c
, &ret
);
493 } else if (type_positive_bits(type
) == 64) {
494 ret
= sval_type_val(type
, strtoull(start
, (char **)&c
, 0));
496 ret
= sval_type_val(type
, strtoll(start
, (char **)&c
, 0));
/*
 * jump_to_call_math() - find the call-math portion of a DB value string.
 *
 * Skips ahead to the text after the first '['.  Returns NULL when there
 * is no '[' or when the bracket holds a comparison ('<', '=', '>', '!')
 * rather than call math.
 *
 * NOTE(review): the NULL returns were elided in the extracted source and
 * reconstructed from the visible guards — verify against upstream.
 */
static const char *jump_to_call_math(const char *value)
{
	const char *c = value;

	while (*c && *c != '[')
		c++;

	if (!*c)
		return NULL;
	c++;
	if (*c == '<' || *c == '=' || *c == '>' || *c == '!')
		return NULL;

	return c;
}
518 static struct range_list
*get_param_return_rl(struct expression
*call
, const char *call_math
)
520 struct expression
*arg
;
524 param
= atoi(call_math
);
526 arg
= get_argument_from_call_expr(call
->args
, param
);
530 return db_return_vals_no_args(arg
);
533 static void str_to_rl_helper(struct expression
*call
, struct symbol
*type
, const char *str
, const char **endp
, struct range_list
**rl
)
535 struct range_list
*rl_tmp
= NULL
;
536 sval_t prev_min
, min
, max
;
539 prev_min
= sval_type_min(type
);
540 min
= sval_type_min(type
);
541 max
= sval_type_max(type
);
543 while (*c
!= '\0' && *c
!= '[') {
545 if (sval_cmp(min
, sval_type_min(type
)) != 0)
547 max
= sval_type_max(type
);
548 add_range_t(type
, &rl_tmp
, min
, max
);
553 min
= parse_val(0, call
, type
, c
, &c
);
554 if (!sval_fits(type
, min
))
555 min
= sval_type_min(type
);
559 if (*c
== '\0' || *c
== '[') {
560 add_range_t(type
, &rl_tmp
, min
, min
);
564 add_range_t(type
, &rl_tmp
, min
, min
);
570 max
= sval_type_max(type
);
571 add_range_t(type
, &rl_tmp
, min
, max
);
573 if (*c
== '[' || *c
== '\0')
577 sm_msg("debug XXX: trouble parsing %s c = %s", str
, c
);
583 max
= parse_val(1, call
, type
, c
, &c
);
584 if (!sval_fits(type
, max
))
585 max
= sval_type_max(type
);
587 max
= sval_type_max(type
);
588 add_range_t(type
, &rl_tmp
, min
, max
);
590 if (*c
== '[' || *c
== '\0')
594 add_range_t(type
, &rl_tmp
, min
, max
);
605 static void str_to_dinfo(struct expression
*call
, struct symbol
*type
, const char *value
, struct data_info
*dinfo
)
607 struct range_list
*math_rl
;
608 const char *call_math
;
610 struct range_list
*rl
= NULL
;
615 if (strcmp(value
, "empty") == 0)
618 if (strncmp(value
, "[==$", 4) == 0) {
619 struct expression
*arg
;
622 if (!str_to_comparison_arg(value
, call
, &comparison
, &arg
))
624 if (!get_implied_rl(arg
, &rl
))
629 str_to_rl_helper(call
, type
, value
, &c
, &rl
);
633 call_math
= jump_to_call_math(value
);
634 if (call_math
&& call_math
[0] == 'r') {
635 math_rl
= get_param_return_rl(call
, call_math
);
637 rl
= rl_intersection(rl
, math_rl
);
640 if (call_math
&& parse_call_math_rl(call
, call_math
, &math_rl
)) {
641 rl
= rl_intersection(rl
, math_rl
);
646 * For now if we already tried to handle the call math and couldn't
647 * figure it out then bail.
649 if (jump_to_call_math(c
) == c
+ 1)
652 rl
= filter_by_comparison_call(c
, call
, &c
, rl
);
655 rl
= cast_rl(type
, rl
);
656 dinfo
->value_ranges
= rl
;
659 static int rl_is_sane(struct range_list
*rl
)
661 struct data_range
*tmp
;
665 FOR_EACH_PTR(rl
, tmp
) {
666 if (!sval_fits(type
, tmp
->min
))
668 if (!sval_fits(type
, tmp
->max
))
670 if (sval_cmp(tmp
->min
, tmp
->max
) > 0)
672 } END_FOR_EACH_PTR(tmp
);
677 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
679 struct data_info dinfo
= {};
681 str_to_dinfo(NULL
, type
, value
, &dinfo
);
682 if (!rl_is_sane(dinfo
.value_ranges
))
683 dinfo
.value_ranges
= alloc_whole_rl(type
);
684 *rl
= dinfo
.value_ranges
;
687 void call_results_to_rl(struct expression
*expr
, struct symbol
*type
, const char *value
, struct range_list
**rl
)
689 struct data_info dinfo
= {};
691 str_to_dinfo(strip_expr(expr
), type
, value
, &dinfo
);
692 *rl
= dinfo
.value_ranges
;
695 int is_whole_rl(struct range_list
*rl
)
697 struct data_range
*drange
;
699 if (ptr_list_empty((struct ptr_list
*)rl
))
701 drange
= first_ptr_list((struct ptr_list
*)rl
);
702 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
707 int is_unknown_ptr(struct range_list
*rl
)
709 struct data_range
*drange
;
715 FOR_EACH_PTR(rl
, drange
) {
718 if (sval_cmp(drange
->min
, valid_ptr_min_sval
) == 0 &&
719 sval_cmp(drange
->max
, valid_ptr_max_sval
) == 0)
721 } END_FOR_EACH_PTR(drange
);
726 int is_whole_rl_non_zero(struct range_list
*rl
)
728 struct data_range
*drange
;
730 if (ptr_list_empty((struct ptr_list
*)rl
))
732 drange
= first_ptr_list((struct ptr_list
*)rl
);
733 if (sval_unsigned(drange
->min
) &&
734 drange
->min
.value
== 1 &&
735 sval_is_max(drange
->max
))
737 if (!sval_is_min(drange
->min
) || drange
->max
.value
!= -1)
739 drange
= last_ptr_list((struct ptr_list
*)rl
);
740 if (drange
->min
.value
!= 1 || !sval_is_max(drange
->max
))
745 sval_t
rl_min(struct range_list
*rl
)
747 struct data_range
*drange
;
750 ret
.type
= &llong_ctype
;
751 ret
.value
= LLONG_MIN
;
752 if (ptr_list_empty((struct ptr_list
*)rl
))
754 drange
= first_ptr_list((struct ptr_list
*)rl
);
758 sval_t
rl_max(struct range_list
*rl
)
760 struct data_range
*drange
;
763 ret
.type
= &llong_ctype
;
764 ret
.value
= LLONG_MAX
;
765 if (ptr_list_empty((struct ptr_list
*)rl
))
767 drange
= last_ptr_list((struct ptr_list
*)rl
);
771 int rl_to_sval(struct range_list
*rl
, sval_t
*sval
)
780 if (sval_cmp(min
, max
) != 0)
786 struct symbol
*rl_type(struct range_list
*rl
)
790 return rl_min(rl
).type
;
793 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
795 struct data_range
*ret
;
798 ret
= __alloc_perm_data_range(0);
800 ret
= __alloc_data_range(0);
806 struct data_range
*alloc_range(sval_t min
, sval_t max
)
808 return alloc_range_helper_sval(min
, max
, 0);
811 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
813 return alloc_range_helper_sval(min
, max
, 1);
816 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
818 struct range_list
*rl
= NULL
;
820 if (sval_cmp(min
, max
) > 0)
821 return alloc_whole_rl(min
.type
);
823 add_range(&rl
, min
, max
);
827 struct range_list
*alloc_whole_rl(struct symbol
*type
)
829 if (!type
|| type_positive_bits(type
) < 0)
831 if (type
->type
== SYM_ARRAY
)
834 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
837 static bool collapse_pointer_rl(struct range_list
**rl
, sval_t min
, sval_t max
)
839 struct range_list
*new_rl
= NULL
;
840 struct data_range
*tmp
;
846 * With the mtag work, then we end up getting huge lists of mtags.
847 * That seems cool, but the problem is that we can only store about
848 * 8-10 mtags in the DB before we truncate the list. Also the mtags
849 * aren't really used at all so it's a waste of resources for now...
850 * In the future, we maybe will revisit this code.
857 if (!type_is_ptr(min
.type
))
860 if (ptr_list_size((struct ptr_list
*)*rl
) < 8)
862 FOR_EACH_PTR(*rl
, tmp
) {
863 if (!is_err_ptr(tmp
->min
))
865 } END_FOR_EACH_PTR(tmp
);
869 FOR_EACH_PTR(*rl
, tmp
) {
870 if (sval_cmp(tmp
->min
, valid_ptr_min_sval
) >= 0 &&
871 sval_cmp(tmp
->max
, valid_ptr_max_sval
) <= 0)
872 add_range(&new_rl
, valid_ptr_min_sval
, valid_ptr_max_sval
);
874 add_range(&new_rl
, tmp
->min
, tmp
->max
);
875 } END_FOR_EACH_PTR(tmp
);
877 add_range(&new_rl
, min
, max
);
886 extern int rl_ptrlist_hack
;
887 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
889 struct data_range
*tmp
;
890 struct data_range
*new = NULL
;
894 * There is at least on valid reason why the types might be confusing
895 * and that's when you have a void pointer and on some paths you treat
896 * it as a u8 pointer and on other paths you treat it as a u16 pointer.
897 * This case is hard to deal with.
899 * There are other cases where we probably should be more specific about
900 * the types than we are. For example, we end up merging a lot of ulong
901 * with pointers and I have not figured out why we do that.
903 * But this hack works for both cases, I think. We cast it to pointers
904 * or we use the bigger size.
907 if (*list
&& rl_type(*list
) != min
.type
) {
908 if (rl_type(*list
)->type
== SYM_PTR
) {
909 min
= sval_cast(rl_type(*list
), min
);
910 max
= sval_cast(rl_type(*list
), max
);
911 } else if (min
.type
->type
== SYM_PTR
) {
912 *list
= cast_rl(min
.type
, *list
);
913 } else if (type_bits(rl_type(*list
)) >= type_bits(min
.type
)) {
914 min
= sval_cast(rl_type(*list
), min
);
915 max
= sval_cast(rl_type(*list
), max
);
917 *list
= cast_rl(min
.type
, *list
);
921 if (sval_cmp(min
, max
) > 0) {
922 min
= sval_type_min(min
.type
);
923 max
= sval_type_max(min
.type
);
926 if (collapse_pointer_rl(list
, min
, max
))
930 * FIXME: This has a problem merging a range_list like: min-0,3-max
931 * with a range like 1-2. You end up with min-2,3-max instead of
934 FOR_EACH_PTR(*list
, tmp
) {
936 /* Sometimes we overlap with more than one range
937 so we have to delete or modify the next range. */
938 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
939 /* join 2 ranges here */
941 DELETE_CURRENT_PTR(tmp
);
945 /* Doesn't overlap with the next one. */
946 if (sval_cmp(max
, tmp
->min
) < 0)
949 if (sval_cmp(max
, tmp
->max
) <= 0) {
950 /* Partially overlaps the next one. */
952 DELETE_CURRENT_PTR(tmp
);
955 /* Completely overlaps the next one. */
956 DELETE_CURRENT_PTR(tmp
);
957 /* there could be more ranges to delete */
961 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
962 /* join 2 ranges into a big range */
963 new = alloc_range(min
, tmp
->max
);
964 REPLACE_CURRENT_PTR(tmp
, new);
967 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
968 new = alloc_range(min
, max
);
969 INSERT_CURRENT(new, tmp
);
972 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
973 if (sval_cmp(max
, tmp
->max
) < 0)
977 new = alloc_range(min
, max
);
978 REPLACE_CURRENT_PTR(tmp
, new);
983 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
985 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
987 new = alloc_range(min
, max
);
988 REPLACE_CURRENT_PTR(tmp
, new);
992 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
993 /* join 2 ranges into a big range */
994 new = alloc_range(tmp
->min
, max
);
995 REPLACE_CURRENT_PTR(tmp
, new);
999 /* the new range is entirely above the existing ranges */
1000 } END_FOR_EACH_PTR(tmp
);
1003 new = alloc_range(min
, max
);
1005 rl_ptrlist_hack
= 1;
1006 add_ptr_list(list
, new);
1007 rl_ptrlist_hack
= 0;
1010 struct range_list
*clone_rl(struct range_list
*list
)
1012 struct data_range
*tmp
;
1013 struct range_list
*ret
= NULL
;
1015 FOR_EACH_PTR(list
, tmp
) {
1016 add_ptr_list(&ret
, tmp
);
1017 } END_FOR_EACH_PTR(tmp
);
1021 struct range_list
*clone_rl_permanent(struct range_list
*list
)
1023 struct data_range
*tmp
;
1024 struct data_range
*new;
1025 struct range_list
*ret
= NULL
;
1027 FOR_EACH_PTR(list
, tmp
) {
1028 new = alloc_range_perm(tmp
->min
, tmp
->max
);
1029 add_ptr_list(&ret
, new);
1030 } END_FOR_EACH_PTR(tmp
);
1034 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
1036 struct data_range
*tmp
;
1037 struct range_list
*ret
= NULL
;
1039 FOR_EACH_PTR(one
, tmp
) {
1040 add_range(&ret
, tmp
->min
, tmp
->max
);
1041 } END_FOR_EACH_PTR(tmp
);
1042 FOR_EACH_PTR(two
, tmp
) {
1043 add_range(&ret
, tmp
->min
, tmp
->max
);
1044 } END_FOR_EACH_PTR(tmp
);
1048 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
1050 struct data_range
*tmp
;
1051 struct range_list
*ret
= NULL
;
1056 min
= sval_cast(rl_type(list
), min
);
1057 max
= sval_cast(rl_type(list
), max
);
1058 if (sval_cmp(min
, max
) > 0) {
1064 FOR_EACH_PTR(list
, tmp
) {
1065 if (sval_cmp(tmp
->max
, min
) < 0) {
1066 add_range(&ret
, tmp
->min
, tmp
->max
);
1069 if (sval_cmp(tmp
->min
, max
) > 0) {
1070 add_range(&ret
, tmp
->min
, tmp
->max
);
1073 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
1075 if (sval_cmp(tmp
->min
, min
) >= 0) {
1077 add_range(&ret
, max
, tmp
->max
);
1078 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
1080 add_range(&ret
, tmp
->min
, min
);
1084 add_range(&ret
, tmp
->min
, min
);
1085 add_range(&ret
, max
, tmp
->max
);
1087 } END_FOR_EACH_PTR(tmp
);
1091 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
1097 if (sval_cmp(one
->min
, two
->min
) != 0)
1099 if (sval_cmp(one
->max
, two
->max
) != 0)
/*
 * rl_equiv() - are two range lists element-wise equal?
 *
 * Walks both lists in lock-step; any mismatch (including different
 * lengths, since a NULL element vs a non-NULL one fails
 * ranges_equiv()) makes them non-equivalent.
 *
 * NOTE(review): the loop structure and identity shortcut were partially
 * elided in the extracted source and reconstructed — verify against
 * upstream.
 */
int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		if (!one_range && !two_range)
			return 1;
		if (!ranges_equiv(one_range, two_range))
			return 0;
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);

	return 1;
}
1128 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
1130 switch (comparison
) {
1132 case SPECIAL_UNSIGNED_LT
:
1133 if (sval_cmp(left
->min
, right
->max
) < 0)
1136 case SPECIAL_UNSIGNED_LTE
:
1138 if (sval_cmp(left
->min
, right
->max
) <= 0)
1142 if (sval_cmp(left
->max
, right
->min
) < 0)
1144 if (sval_cmp(left
->min
, right
->max
) > 0)
1147 case SPECIAL_UNSIGNED_GTE
:
1149 if (sval_cmp(left
->max
, right
->min
) >= 0)
1153 case SPECIAL_UNSIGNED_GT
:
1154 if (sval_cmp(left
->max
, right
->min
) > 0)
1157 case SPECIAL_NOTEQUAL
:
1158 if (sval_cmp(left
->min
, left
->max
) != 0)
1160 if (sval_cmp(right
->min
, right
->max
) != 0)
1162 if (sval_cmp(left
->min
, right
->min
) != 0)
1166 sm_perror("unhandled comparison %d", comparison
);
/*
 * true_comparison_range_LR() - like true_comparison_range() but @left
 * says which side @var is on: non-zero means "var <cmp> val", zero
 * means "val <cmp> var".
 */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}
1180 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
1182 switch (comparison
) {
1184 case SPECIAL_UNSIGNED_LT
:
1185 if (sval_cmp(left
->max
, right
->min
) >= 0)
1188 case SPECIAL_UNSIGNED_LTE
:
1190 if (sval_cmp(left
->max
, right
->min
) > 0)
1194 if (sval_cmp(left
->min
, left
->max
) != 0)
1196 if (sval_cmp(right
->min
, right
->max
) != 0)
1198 if (sval_cmp(left
->min
, right
->min
) != 0)
1201 case SPECIAL_UNSIGNED_GTE
:
1203 if (sval_cmp(left
->min
, right
->max
) < 0)
1207 case SPECIAL_UNSIGNED_GT
:
1208 if (sval_cmp(left
->min
, right
->max
) <= 0)
1211 case SPECIAL_NOTEQUAL
:
1212 if (sval_cmp(left
->max
, right
->min
) < 0)
1214 if (sval_cmp(left
->min
, right
->max
) > 0)
1218 sm_perror("unhandled comparison %d", comparison
);
/*
 * false_comparison_range_LR() - like false_comparison_range_sval() but
 * @left says which side @var is on (see true_comparison_range_LR()).
 */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}
1232 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
1234 struct range_list
*rl_left
, *rl_right
;
1235 struct data_range
*tmp_left
, *tmp_right
;
1236 struct symbol
*type
;
1238 if (comparison
== UNKNOWN_COMPARISON
)
1240 if (!get_implied_rl(left
, &rl_left
))
1242 if (!get_implied_rl(right
, &rl_right
))
1245 type
= rl_type(rl_left
);
1246 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1247 type
= rl_type(rl_right
);
1248 if (type_positive_bits(type
) < 31)
1251 rl_left
= cast_rl(type
, rl_left
);
1252 rl_right
= cast_rl(type
, rl_right
);
1254 FOR_EACH_PTR(rl_left
, tmp_left
) {
1255 FOR_EACH_PTR(rl_right
, tmp_right
) {
1256 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
1258 } END_FOR_EACH_PTR(tmp_right
);
1259 } END_FOR_EACH_PTR(tmp_left
);
1263 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
1265 struct range_list
*rl_left
, *rl_right
;
1266 struct data_range
*tmp_left
, *tmp_right
;
1267 struct symbol
*type
;
1269 if (!get_implied_rl(left
, &rl_left
))
1271 if (!get_implied_rl(right
, &rl_right
))
1274 type
= rl_type(rl_left
);
1275 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1276 type
= rl_type(rl_right
);
1277 if (type_positive_bits(type
) < 31)
1280 rl_left
= cast_rl(type
, rl_left
);
1281 rl_right
= cast_rl(type
, rl_right
);
1283 FOR_EACH_PTR(rl_left
, tmp_left
) {
1284 FOR_EACH_PTR(rl_right
, tmp_right
) {
1285 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
1287 } END_FOR_EACH_PTR(tmp_right
);
1288 } END_FOR_EACH_PTR(tmp_left
);
1292 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1294 struct data_range
*left_tmp
, *right_tmp
;
1295 struct symbol
*type
;
1297 if (!left_ranges
|| !right_ranges
|| comparison
== UNKNOWN_COMPARISON
)
1300 type
= rl_type(left_ranges
);
1301 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1302 type
= rl_type(right_ranges
);
1303 if (type_positive_bits(type
) < 31)
1306 left_ranges
= cast_rl(type
, left_ranges
);
1307 right_ranges
= cast_rl(type
, right_ranges
);
1309 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1310 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1311 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
1313 } END_FOR_EACH_PTR(right_tmp
);
1314 } END_FOR_EACH_PTR(left_tmp
);
1318 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1320 struct data_range
*left_tmp
, *right_tmp
;
1321 struct symbol
*type
;
1323 if (!left_ranges
|| !right_ranges
|| comparison
== UNKNOWN_COMPARISON
)
1326 type
= rl_type(left_ranges
);
1327 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1328 type
= rl_type(right_ranges
);
1329 if (type_positive_bits(type
) < 31)
1332 left_ranges
= cast_rl(type
, left_ranges
);
1333 right_ranges
= cast_rl(type
, right_ranges
);
1335 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1336 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1337 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
1339 } END_FOR_EACH_PTR(right_tmp
);
1340 } END_FOR_EACH_PTR(left_tmp
);
1344 /* FIXME: the _rl here stands for right left so really it should be _lr */
/* Order-aware wrapper: @left non-zero means "a <cmp> b", else "b <cmp> a". */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}
/* Order-aware wrapper: @left non-zero means "a <cmp> b", else "b <cmp> a". */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}
1361 int rl_has_sval(struct range_list
*rl
, sval_t sval
)
1363 struct data_range
*tmp
;
1365 FOR_EACH_PTR(rl
, tmp
) {
1366 if (sval_cmp(tmp
->min
, sval
) <= 0 &&
1367 sval_cmp(tmp
->max
, sval
) >= 0)
1369 } END_FOR_EACH_PTR(tmp
);
/* Append @drange to *@list without sorting or merging (unlike add_range()). */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/* Push @rl onto the range-list stack. */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/* Pop and return the top of the range-list stack. */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}
/* Peek at the top of the range-list stack without popping. */
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}
/* Replace the top of the stack with itself minus the @filter ranges. */
void filter_top_rl(struct range_list_stack **rl_stack, struct range_list *filter)
{
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = rl_filter(rl, filter);
	push_rl(rl_stack, rl);
}
1409 struct range_list
*rl_truncate_cast(struct symbol
*type
, struct range_list
*rl
)
1411 struct data_range
*tmp
;
1412 struct range_list
*ret
= NULL
;
1418 if (!type
|| type
== rl_type(rl
))
1421 FOR_EACH_PTR(rl
, tmp
) {
1424 if (type_bits(type
) < type_bits(rl_type(rl
))) {
1425 min
.uvalue
= tmp
->min
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1426 max
.uvalue
= tmp
->max
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1428 if (sval_cmp(min
, max
) > 0) {
1429 min
= sval_cast(type
, min
);
1430 max
= sval_cast(type
, max
);
1432 add_range_t(type
, &ret
, min
, max
);
1433 } END_FOR_EACH_PTR(tmp
);
/*
 * rl_fits_in_type() - can every value in @rl be represented in @type?
 *
 * NOTE(review): return statements were elided in the extracted source
 * and reconstructed from the visible guards — verify against upstream.
 */
int rl_fits_in_type(struct range_list *rl, struct symbol *type)
{
	/* same or wider type always fits */
	if (type_bits(rl_type(rl)) <= type_bits(type))
		return 1;
	if (sval_cmp(rl_max(rl), sval_type_max(type)) > 0)
		return 0;
	if (sval_is_negative(rl_min(rl)) &&
	    sval_cmp(rl_min(rl), sval_type_min(type)) < 0)
		return 0;
	return 1;
}
1450 static int rl_type_consistent(struct range_list
*rl
)
1452 struct data_range
*tmp
;
1453 struct symbol
*type
;
1456 FOR_EACH_PTR(rl
, tmp
) {
1457 if (type
!= tmp
->min
.type
|| type
!= tmp
->max
.type
)
1459 } END_FOR_EACH_PTR(tmp
);
1463 static struct range_list
*cast_to_bool(struct range_list
*rl
)
1465 struct data_range
*tmp
;
1466 struct range_list
*ret
= NULL
;
1469 sval_t min
= { .type
= &bool_ctype
};
1470 sval_t max
= { .type
= &bool_ctype
};
1472 FOR_EACH_PTR(rl
, tmp
) {
1473 if (tmp
->min
.value
|| tmp
->max
.value
)
1475 if (sval_is_negative(tmp
->min
) &&
1476 sval_is_negative(tmp
->max
))
1478 if (tmp
->min
.value
== 0 ||
1479 tmp
->max
.value
== 0)
1481 if (sval_is_negative(tmp
->min
) &&
1484 } END_FOR_EACH_PTR(tmp
);
1491 add_range(&ret
, min
, max
);
1495 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
1497 struct data_range
*tmp
;
1498 struct range_list
*ret
= NULL
;
1505 if (!rl_is_sane(rl
))
1506 return alloc_whole_rl(type
);
1507 if (type
== rl_type(rl
) && rl_type_consistent(rl
))
1510 if (type
== &bool_ctype
)
1511 return cast_to_bool(rl
);
1513 FOR_EACH_PTR(rl
, tmp
) {
1514 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
1515 } END_FOR_EACH_PTR(tmp
);
1518 return alloc_whole_rl(type
);
1523 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
1525 struct data_range
*tmp
;
1527 FOR_EACH_PTR(filter
, tmp
) {
1528 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
1529 } END_FOR_EACH_PTR(tmp
);
1534 struct range_list
*do_intersection(struct range_list
*one_rl
, struct range_list
*two_rl
)
1536 struct data_range
*one
, *two
;
1537 struct range_list
*ret
= NULL
;
1540 PREPARE_PTR_LIST(one_rl
, one
);
1541 PREPARE_PTR_LIST(two_rl
, two
);
1546 if (sval_cmp(one
->max
, two
->min
) < 0) {
1550 if (sval_cmp(one
->min
, two
->min
) < 0 && sval_cmp(one
->max
, two
->max
) <= 0) {
1551 add_range(&ret
, two
->min
, one
->max
);
1555 if (sval_cmp(one
->min
, two
->min
) >= 0 && sval_cmp(one
->max
, two
->max
) <= 0) {
1556 add_range(&ret
, one
->min
, one
->max
);
1560 if (sval_cmp(one
->min
, two
->min
) < 0 && sval_cmp(one
->max
, two
->max
) > 0) {
1561 add_range(&ret
, two
->min
, two
->max
);
1565 if (sval_cmp(one
->min
, two
->max
) <= 0 && sval_cmp(one
->max
, two
->max
) > 0) {
1566 add_range(&ret
, one
->min
, two
->max
);
1570 if (sval_cmp(one
->min
, two
->max
) <= 0) {
1571 sm_fatal("error calculating intersection of '%s' and '%s'", show_rl(one_rl
), show_rl(two_rl
));
1577 FINISH_PTR_LIST(two
);
1578 FINISH_PTR_LIST(one
);
1583 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
1585 struct range_list
*ret
;
1586 struct symbol
*ret_type
;
1587 struct symbol
*small_type
;
1588 struct symbol
*large_type
;
1593 ret_type
= rl_type(one
);
1594 small_type
= rl_type(one
);
1595 large_type
= rl_type(two
);
1597 if (type_bits(rl_type(two
)) < type_bits(small_type
)) {
1598 small_type
= rl_type(two
);
1599 large_type
= rl_type(one
);
1602 one
= cast_rl(large_type
, one
);
1603 two
= cast_rl(large_type
, two
);
1605 ret
= do_intersection(one
, two
);
1606 return cast_rl(ret_type
, ret
);
1609 static struct range_list
*handle_mod_rl(struct range_list
*left
, struct range_list
*right
)
1614 max
= rl_max(right
);
1615 if (sval_is_max(max
))
1620 if (sval_is_negative(max
))
1622 if (sval_cmp(rl_max(left
), max
) < 0)
1626 return alloc_rl(zero
, max
);
1629 static struct range_list
*get_neg_rl(struct range_list
*rl
)
1631 struct data_range
*tmp
;
1632 struct data_range
*new;
1633 struct range_list
*ret
= NULL
;
1637 if (sval_is_positive(rl_min(rl
)))
1640 FOR_EACH_PTR(rl
, tmp
) {
1641 if (sval_is_positive(tmp
->min
))
1643 if (sval_is_positive(tmp
->max
)) {
1644 new = alloc_range(tmp
->min
, tmp
->max
);
1645 new->max
.value
= -1;
1646 add_range(&ret
, new->min
, new->max
);
1649 add_range(&ret
, tmp
->min
, tmp
->max
);
1650 } END_FOR_EACH_PTR(tmp
);
1655 static struct range_list
*get_pos_rl(struct range_list
*rl
)
1657 struct data_range
*tmp
;
1658 struct data_range
*new;
1659 struct range_list
*ret
= NULL
;
1663 if (sval_is_negative(rl_max(rl
)))
1666 FOR_EACH_PTR(rl
, tmp
) {
1667 if (sval_is_negative(tmp
->max
))
1669 if (sval_is_positive(tmp
->min
)) {
1670 add_range(&ret
, tmp
->min
, tmp
->max
);
1673 new = alloc_range(tmp
->min
, tmp
->max
);
1675 add_range(&ret
, new->min
, new->max
);
1676 } END_FOR_EACH_PTR(tmp
);
1681 static struct range_list
*divide_rl_helper(struct range_list
*left
, struct range_list
*right
)
1683 sval_t right_min
, right_max
;
1686 if (!left
|| !right
)
1689 /* let's assume we never divide by zero */
1690 right_min
= rl_min(right
);
1691 right_max
= rl_max(right
);
1692 if (right_min
.value
== 0 && right_max
.value
== 0)
1694 if (right_min
.value
== 0)
1695 right_min
.value
= 1;
1696 if (right_max
.value
== 0)
1697 right_max
.value
= -1;
1699 max
= sval_binop(rl_max(left
), '/', right_min
);
1700 min
= sval_binop(rl_min(left
), '/', right_max
);
1702 return alloc_rl(min
, max
);
1705 static struct range_list
*handle_divide_rl(struct range_list
*left
, struct range_list
*right
)
1707 struct range_list
*left_neg
, *left_pos
, *right_neg
, *right_pos
;
1708 struct range_list
*neg_neg
, *neg_pos
, *pos_neg
, *pos_pos
;
1709 struct range_list
*ret
;
1711 if (is_whole_rl(right
))
1714 left_neg
= get_neg_rl(left
);
1715 left_pos
= get_pos_rl(left
);
1716 right_neg
= get_neg_rl(right
);
1717 right_pos
= get_pos_rl(right
);
1719 neg_neg
= divide_rl_helper(left_neg
, right_neg
);
1720 neg_pos
= divide_rl_helper(left_neg
, right_pos
);
1721 pos_neg
= divide_rl_helper(left_pos
, right_neg
);
1722 pos_pos
= divide_rl_helper(left_pos
, right_pos
);
1724 ret
= rl_union(neg_neg
, neg_pos
);
1725 ret
= rl_union(ret
, pos_neg
);
1726 return rl_union(ret
, pos_pos
);
1729 static struct range_list
*ptr_add_mult(struct range_list
*left
, int op
, struct range_list
*right
)
1731 struct range_list
*ret
;
1732 sval_t l_sval
, r_sval
, res
;
1735 * This function is sort of the wrong API because it takes two pointer
1736 * and adds them together. The caller is expected to figure out
1737 * alignment. Neither of those are the correct things to do.
1739 * Really this function is quite bogus...
1742 if (rl_to_sval(left
, &l_sval
) && rl_to_sval(right
, &r_sval
)) {
1743 res
= sval_binop(l_sval
, op
, r_sval
);
1744 return alloc_rl(res
, res
);
1747 if (rl_min(left
).value
!= 0 || rl_max(right
).value
!= 0) {
1748 ret
= alloc_rl(valid_ptr_min_sval
, valid_ptr_max_sval
);
1749 return cast_rl(rl_type(left
), ret
);
1752 return alloc_whole_rl(rl_type(left
));
1755 static struct range_list
*handle_add_mult_rl(struct range_list
*left
, int op
, struct range_list
*right
)
1759 if (type_is_ptr(rl_type(left
)) || type_is_ptr(rl_type(right
)))
1760 return ptr_add_mult(left
, op
, right
);
1762 if (sval_binop_overflows(rl_min(left
), op
, rl_min(right
)))
1764 min
= sval_binop(rl_min(left
), op
, rl_min(right
));
1766 if (sval_binop_overflows(rl_max(left
), op
, rl_max(right
)))
1768 max
= sval_binop(rl_max(left
), op
, rl_max(right
));
1770 return alloc_rl(min
, max
);
1773 static struct range_list
*handle_sub_rl(struct range_list
*left_orig
, struct range_list
*right_orig
)
1775 struct range_list
*left_rl
, *right_rl
;
1776 struct symbol
*type
;
1778 sval_t min_ll
, max_ll
, res_ll
;
1781 /* TODO: These things should totally be using dranges where possible */
1783 if (!left_orig
|| !right_orig
)
1787 if (type_positive_bits(rl_type(left_orig
)) > type_positive_bits(type
))
1788 type
= rl_type(left_orig
);
1789 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
1790 type
= rl_type(right_orig
);
1792 left_rl
= cast_rl(type
, left_orig
);
1793 right_rl
= cast_rl(type
, right_orig
);
1795 max
= rl_max(left_rl
);
1796 min
= sval_type_min(type
);
1798 min_ll
= rl_min(left_rl
);
1799 min_ll
.type
= &llong_ctype
;
1800 max_ll
= rl_max(right_rl
);
1801 max_ll
.type
= &llong_ctype
;
1803 res_ll
.value
= min_ll
.value
- max_ll
.value
;
1805 if (!sval_binop_overflows(rl_min(left_rl
), '-', rl_max(right_rl
))) {
1806 tmp
= sval_binop(rl_min(left_rl
), '-', rl_max(right_rl
));
1807 if (sval_cmp(tmp
, min
) > 0)
1809 } else if (type_positive_bits(type
) < 63 &&
1810 !sval_binop_overflows(min_ll
, '-', max_ll
) &&
1811 (min
.value
!= 0 && sval_cmp(res_ll
, min
) >= 0)) {
1812 struct range_list
*left_casted
, *right_casted
, *result
;
1814 left_casted
= cast_rl(&llong_ctype
, left_orig
);
1815 right_casted
= cast_rl(&llong_ctype
, right_orig
);
1816 result
= handle_sub_rl(left_casted
, right_casted
);
1817 return cast_rl(type
, result
);
1820 if (!sval_is_max(rl_max(left_rl
))) {
1821 tmp
= sval_binop(rl_max(left_rl
), '-', rl_min(right_rl
));
1822 if (sval_cmp(tmp
, max
) < 0)
1826 if (sval_is_min(min
) && sval_is_max(max
))
1829 return alloc_rl(min
, max
);
/* Bit mask derived from the smallest value in @rl (lower bound on set bits). */
static unsigned long long rl_bits_always_set(struct range_list *rl)
{
	return sval_fls_mask(rl_min(rl));
}
/* Bit mask derived from the largest value in @rl (upper bound on set bits). */
static unsigned long long rl_bits_maybe_set(struct range_list *rl)
{
	return sval_fls_mask(rl_max(rl));
}
1842 static struct range_list
*handle_OR_rl(struct range_list
*left
, struct range_list
*right
)
1844 unsigned long long left_min
, left_max
, right_min
, right_max
;
1848 if ((rl_to_sval(left
, &sval
) || rl_to_sval(right
, &sval
)) &&
1849 !sval_binop_overflows(rl_max(left
), '+', rl_max(right
)))
1850 return rl_binop(left
, '+', right
);
1852 left_min
= rl_bits_always_set(left
);
1853 left_max
= rl_bits_maybe_set(left
);
1854 right_min
= rl_bits_always_set(right
);
1855 right_max
= rl_bits_maybe_set(right
);
1857 min
.type
= max
.type
= &ullong_ctype
;
1858 min
.uvalue
= left_min
| right_min
;
1859 max
.uvalue
= left_max
| right_max
;
1861 return cast_rl(rl_type(left
), alloc_rl(min
, max
));
1864 static struct range_list
*handle_XOR_rl(struct range_list
*left
, struct range_list
*right
)
1866 unsigned long long left_set
, left_maybe
;
1867 unsigned long long right_set
, right_maybe
;
1870 left_set
= rl_bits_always_set(left
);
1871 left_maybe
= rl_bits_maybe_set(left
);
1873 right_set
= rl_bits_always_set(right
);
1874 right_maybe
= rl_bits_maybe_set(right
);
1876 zero
= max
= rl_min(left
);
1878 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1880 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1883 static sval_t
sval_lowest_set_bit(sval_t sval
)
1885 sval_t ret
= { .type
= sval
.type
};
1888 for (i
= 0; i
< 64; i
++) {
1889 if (sval
.uvalue
& 1ULL << i
) {
1890 ret
.uvalue
= (1ULL << i
);
1897 static struct range_list
*handle_AND_rl(struct range_list
*left
, struct range_list
*right
)
1899 struct bit_info
*one
, *two
;
1900 struct range_list
*rl
;
1901 sval_t min
, max
, zero
;
1902 unsigned long long bits
;
1904 one
= rl_to_binfo(left
);
1905 two
= rl_to_binfo(right
);
1906 bits
= one
->possible
& two
->possible
;
1910 min
= sval_lowest_set_bit(max
);
1912 rl
= alloc_rl(min
, max
);
1916 add_range(&rl
, zero
, zero
);
1921 static struct range_list
*handle_lshift(struct range_list
*left_orig
, struct range_list
*right_orig
)
1923 struct range_list
*left
;
1924 struct data_range
*tmp
;
1925 struct range_list
*ret
= NULL
;
1926 sval_t zero
= { .type
= rl_type(left_orig
), };
1927 sval_t shift
, min
, max
;
1928 bool add_zero
= false;
1930 if (!rl_to_sval(right_orig
, &shift
) || sval_is_negative(shift
))
1932 if (shift
.value
== 0)
1935 /* Cast to unsigned for easier left shift math */
1936 if (type_positive_bits(rl_type(left_orig
)) < 32)
1937 left
= cast_rl(&uint_ctype
, left_orig
);
1938 else if(type_positive_bits(rl_type(left_orig
)) == 63)
1939 left
= cast_rl(&ullong_ctype
, left_orig
);
1943 FOR_EACH_PTR(left
, tmp
) {
1947 if (min
.value
== 0 || max
.value
> sval_type_max(max
.type
).uvalue
>> shift
.uvalue
)
1949 if (min
.value
== 0 && max
.value
== 0)
1953 min
= sval_binop(min
, SPECIAL_LEFTSHIFT
, shift
);
1954 max
= sval_binop(max
, SPECIAL_LEFTSHIFT
, shift
);
1955 add_range(&ret
, min
, max
);
1956 } END_FOR_EACH_PTR(tmp
);
1958 if (!rl_fits_in_type(ret
, rl_type(left_orig
)))
1960 ret
= cast_rl(rl_type(left_orig
), ret
);
1962 add_range(&ret
, zero
, zero
);
1967 static struct range_list
*handle_rshift(struct range_list
*left_orig
, struct range_list
*right_orig
)
1969 struct data_range
*tmp
;
1970 struct range_list
*ret
= NULL
;
1971 sval_t shift
, min
, max
;
1973 if (!rl_to_sval(right_orig
, &shift
) || sval_is_negative(shift
))
1975 if (shift
.value
== 0)
1978 FOR_EACH_PTR(left_orig
, tmp
) {
1979 min
= sval_binop(tmp
->min
, SPECIAL_RIGHTSHIFT
, shift
);
1980 max
= sval_binop(tmp
->max
, SPECIAL_RIGHTSHIFT
, shift
);
1981 add_range(&ret
, min
, max
);
1982 } END_FOR_EACH_PTR(tmp
);
1987 struct range_list
*rl_binop(struct range_list
*left
, int op
, struct range_list
*right
)
1989 struct symbol
*cast_type
;
1990 sval_t left_sval
, right_sval
;
1991 struct range_list
*ret
= NULL
;
1993 cast_type
= rl_type(left
);
1994 if (sval_type_max(rl_type(left
)).uvalue
< sval_type_max(rl_type(right
)).uvalue
)
1995 cast_type
= rl_type(right
);
1996 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
1997 cast_type
= &int_ctype
;
1999 left
= cast_rl(cast_type
, left
);
2000 right
= cast_rl(cast_type
, right
);
2002 if (!left
&& !right
)
2005 if (rl_to_sval(left
, &left_sval
) && rl_to_sval(right
, &right_sval
)) {
2006 sval_t val
= sval_binop(left_sval
, op
, right_sval
);
2007 return alloc_rl(val
, val
);
2012 ret
= handle_mod_rl(left
, right
);
2015 ret
= handle_divide_rl(left
, right
);
2019 ret
= handle_add_mult_rl(left
, op
, right
);
2022 ret
= handle_OR_rl(left
, right
);
2025 ret
= handle_XOR_rl(left
, right
);
2028 ret
= handle_AND_rl(left
, right
);
2031 ret
= handle_sub_rl(left
, right
);
2033 case SPECIAL_RIGHTSHIFT
:
2034 return handle_rshift(left
, right
);
2035 case SPECIAL_LEFTSHIFT
:
2036 return handle_lshift(left
, right
);
2042 void free_data_info_allocs(void)
2044 struct allocator_struct
*desc
= &data_info_allocator
;
2045 struct allocation_blob
*blob
= desc
->blobs
;
2051 desc
->allocations
= 0;
2052 desc
->total_bytes
= 0;
2053 desc
->useful_bytes
= 0;
2054 desc
->freelist
= NULL
;
2056 struct allocation_blob
*next
= blob
->next
;
2057 blob_free(blob
, desc
->chunking
);
2060 clear_data_range_alloc();
2063 void split_comparison_rl(struct range_list
*left_orig
, int op
, struct range_list
*right_orig
,
2064 struct range_list
**left_true_rl
, struct range_list
**left_false_rl
,
2065 struct range_list
**right_true_rl
, struct range_list
**right_false_rl
)
2067 struct range_list
*left_true
, *left_false
;
2068 struct range_list
*right_true
, *right_false
;
2071 min
= sval_type_min(rl_type(left_orig
));
2072 max
= sval_type_max(rl_type(left_orig
));
2074 left_true
= clone_rl(left_orig
);
2075 left_false
= clone_rl(left_orig
);
2076 right_true
= clone_rl(right_orig
);
2077 right_false
= clone_rl(right_orig
);
2081 case SPECIAL_UNSIGNED_LT
:
2082 left_true
= remove_range(left_orig
, rl_max(right_orig
), max
);
2083 if (!sval_is_min(rl_min(right_orig
))) {
2084 left_false
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
2087 right_true
= remove_range(right_orig
, min
, rl_min(left_orig
));
2088 if (!sval_is_max(rl_max(left_orig
)))
2089 right_false
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
2091 case SPECIAL_UNSIGNED_LTE
:
2093 if (!sval_is_max(rl_max(right_orig
)))
2094 left_true
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
2095 left_false
= remove_range(left_orig
, min
, rl_min(right_orig
));
2097 if (!sval_is_min(rl_min(left_orig
)))
2098 right_true
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
2099 right_false
= remove_range(right_orig
, rl_max(left_orig
), max
);
2101 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
2102 left_false
= remove_range(left_false
, rl_min(left_orig
), rl_min(left_orig
));
2103 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
2104 right_false
= remove_range(right_false
, rl_max(left_orig
), rl_max(left_orig
));
2107 left_true
= rl_intersection(left_orig
, right_orig
);
2108 right_true
= clone_rl(left_true
);
2110 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
2111 left_false
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
2112 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
2113 right_false
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
2115 case SPECIAL_UNSIGNED_GTE
:
2117 if (!sval_is_min(rl_min(right_orig
)))
2118 left_true
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
2119 left_false
= remove_range(left_orig
, rl_max(right_orig
), max
);
2121 if (!sval_is_max(rl_max(left_orig
)))
2122 right_true
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
2123 right_false
= remove_range(right_orig
, min
, rl_min(left_orig
));
2125 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
2126 right_false
= remove_range(right_false
, rl_min(left_orig
), rl_min(left_orig
));
2127 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
2128 left_false
= remove_range(left_false
, rl_max(left_orig
), rl_max(left_orig
));
2131 case SPECIAL_UNSIGNED_GT
:
2132 left_true
= remove_range(left_orig
, min
, rl_min(right_orig
));
2133 if (!sval_is_max(rl_max(right_orig
)))
2134 left_false
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
2136 right_true
= remove_range(right_orig
, rl_max(left_orig
), max
);
2137 if (!sval_is_min(rl_min(left_orig
)))
2138 right_false
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
2140 case SPECIAL_NOTEQUAL
:
2141 left_false
= rl_intersection(left_orig
, right_orig
);
2142 right_false
= clone_rl(left_false
);
2144 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
2145 left_true
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
2146 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
2147 right_true
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
2150 sm_perror(" unhandled comparison %d", op
);
2154 *left_true_rl
= left_true
;
2155 *left_false_rl
= left_false
;
2157 if (right_true_rl
) {
2158 *right_true_rl
= right_true
;
2159 *right_false_rl
= right_false
;