/*
 * Copyright (C) 2009 Dan Carpenter.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
 */
20 #include "smatch_extra.h"
21 #include "smatch_slist.h"
23 ALLOCATOR(data_info
, "smatch extra data");
24 ALLOCATOR(data_range
, "data range");
25 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
26 "permanent ranges", perm_data_range
);
27 __DECLARE_ALLOCATOR(struct ptr_list
, rl_ptrlist
);
29 bool is_err_ptr(sval_t sval
)
31 if (option_project
!= PROJ_KERNEL
)
33 if (!type_is_ptr(sval
.type
))
35 if (sval
.uvalue
< -4095ULL)
40 bool is_err_or_null(struct range_list
*rl
)
42 struct range_list
*no_null
;
47 if (rl_min(rl
).value
== 0 && rl_max(rl
).value
== 0)
50 if (rl_min(rl
).value
!= 0)
53 no_null
= remove_range(rl
, rl_min(rl
), rl_min(rl
));
55 return is_err_ptr(rl_min(no_null
)) && is_err_ptr(rl_max(no_null
));
58 static bool is_oxdead(struct range_list
*rl
)
62 /* check for 0xdead000000000000 */
63 if (!rl_to_sval(rl
, &sval
))
65 if ((sval
.uvalue
>> (bits_in_pointer
- 16)) == 0xdead)
71 bool is_noderef_ptr_rl(struct range_list
*rl
)
76 if (rl_min(rl
).value
== 0 && rl_max(rl
).value
== 0)
78 if (is_err_or_null(rl
))
85 static char *get_err_pointer_str(struct data_range
*drange
)
90 * The kernel has error pointers where you do essentially:
92 * return (void *)(unsigned long)-12;
94 * But what I want here is to print -12 instead of the unsigned version
98 if (!is_err_ptr(drange
->min
))
101 if (drange
->min
.value
== drange
->max
.value
)
102 snprintf(buf
, sizeof(buf
), "(%lld)", drange
->min
.value
);
104 snprintf(buf
, sizeof(buf
), "(%lld)-(%lld)", drange
->min
.value
, drange
->max
.value
);
108 char *show_rl(struct range_list
*list
)
110 struct data_range
*prev_drange
= NULL
;
111 struct data_range
*tmp
;
121 FOR_EACH_PTR(list
, tmp
) {
122 remain
= full
+ sizeof(full
) - p
;
124 snprintf(prev
, full
+ sizeof(full
) - prev
, ",%s-%s",
125 sval_to_str(prev_drange
->min
),
126 sval_to_str(sval_type_max(prev_drange
->min
.type
)));
132 err_ptr
= get_err_pointer_str(tmp
);
134 p
+= snprintf(p
, remain
, "%s%s", i
++ ? "," : "", err_ptr
);
135 } else if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
136 p
+= snprintf(p
, remain
, "%s%s", i
++ ? "," : "",
137 sval_to_str(tmp
->min
));
139 p
+= snprintf(p
, remain
, "%s%s-%s", i
++ ? "," : "",
140 sval_to_str(tmp
->min
),
141 sval_to_str(tmp
->max
));
143 } END_FOR_EACH_PTR(tmp
);
145 return alloc_sname(full
);
/* Release every range list allocated through the rl ptrlist allocator. */
void free_all_rl(void)
{
	clear_rl_ptrlist_alloc();
}
153 static int sval_too_big(struct symbol
*type
, sval_t sval
)
155 if (type_bits(type
) >= 32 &&
156 type_bits(sval
.type
) <= type_bits(type
))
158 if (sval
.uvalue
<= ((1ULL << type_bits(type
)) - 1))
160 if (type_signed(sval
.type
)) {
161 if (type_unsigned(type
)) {
162 unsigned long long neg
= ~sval
.uvalue
;
163 if (neg
<= sval_type_max(type
).uvalue
)
166 if (sval
.value
< sval_type_min(type
).value
)
168 if (sval
.value
> sval_type_max(type
).value
)
172 if (sval
.uvalue
> sval_type_max(type
).uvalue
)
177 static int truncates_nicely(struct symbol
*type
, sval_t min
, sval_t max
)
179 unsigned long long mask
;
180 int bits
= type_bits(type
);
182 if (type_is_fp(min
.type
) && !type_is_fp(type
))
185 if (bits
>= type_bits(min
.type
))
188 mask
= -1ULL << bits
;
189 return (min
.uvalue
& mask
) == (max
.uvalue
& mask
);
192 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
194 /* If we're just adding a number, cast it and add it */
195 if (sval_cmp(min
, max
) == 0) {
196 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
200 /* If the range is within the type range then add it */
201 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
202 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
206 if (truncates_nicely(type
, min
, max
)) {
207 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
212 * If the range we are adding has more bits than the range type then
213 * add the whole range type. Eg:
214 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
217 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
218 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
222 /* Cast negative values to high positive values */
223 if (sval_is_negative(min
) && type_unsigned(type
)) {
224 if (sval_is_positive(max
)) {
225 if (sval_too_high(type
, max
)) {
226 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
229 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
230 max
= sval_type_max(type
);
232 max
= sval_cast(type
, max
);
234 min
= sval_cast(type
, min
);
235 add_range(rl
, min
, max
);
238 /* Cast high positive numbers to negative */
239 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
240 if (!sval_is_negative(sval_cast(type
, min
))) {
241 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
242 min
= sval_type_min(type
);
244 min
= sval_cast(type
, min
);
246 max
= sval_cast(type
, max
);
247 add_range(rl
, min
, max
);
250 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
254 static int str_to_comparison_arg_helper(const char *str
,
255 struct expression
*call
, int *comparison
,
256 struct expression
**arg
, const char **endp
)
268 *comparison
= SPECIAL_LTE
;
273 } else if (*c
== '=') {
276 *comparison
= SPECIAL_EQUAL
;
277 } else if (*c
== '>') {
280 *comparison
= SPECIAL_GTE
;
285 } else if (*c
== '!') {
288 *comparison
= SPECIAL_NOTEQUAL
;
289 } else if (*c
== '$') {
290 *comparison
= SPECIAL_EQUAL
;
299 param
= strtoll(c
, (char **)&c
, 10);
301 * FIXME: handle parameter math. [==$1 + 100]
307 if (*c
== ',' || *c
== ']')
308 c
++; /* skip the ']' character */
314 *arg
= get_argument_from_call_expr(call
->args
, param
);
317 if (*c
== '-' && *(c
+ 1) == '>') {
321 n
= snprintf(buf
, sizeof(buf
), "$%s", c
);
322 if (n
>= sizeof(buf
))
324 if (buf
[n
- 1] == ']')
326 *arg
= gen_expression_from_key(*arg
, buf
);
327 while (*c
&& *c
!= ']')
333 int str_to_comparison_arg(const char *str
, struct expression
*call
, int *comparison
, struct expression
**arg
)
342 return str_to_comparison_arg_helper(str
, call
, comparison
, arg
, NULL
);
345 static int get_val_from_key(int use_max
, struct symbol
*type
, const char *c
, struct expression
*call
, const char **endp
, sval_t
*sval
)
347 struct expression
*arg
;
352 ret
= sval_type_max(type
);
354 ret
= sval_type_min(type
);
356 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
)) {
361 if (use_max
&& get_implied_max(arg
, &tmp
)) {
363 if (comparison
== '<') {
365 ret
= sval_binop(ret
, '-', tmp
);
368 if (!use_max
&& get_implied_min(arg
, &tmp
)) {
370 if (comparison
== '>') {
372 ret
= sval_binop(ret
, '+', tmp
);
380 static sval_t
add_one(sval_t sval
)
386 static sval_t
sub_one(sval_t sval
)
392 void filter_by_comparison(struct range_list
**rl
, int comparison
, struct range_list
*right
)
394 struct range_list
*left_orig
= *rl
;
395 struct range_list
*right_orig
= right
;
396 struct range_list
*ret_rl
= *rl
;
397 struct symbol
*cast_type
;
400 if (comparison
== UNKNOWN_COMPARISON
)
403 cast_type
= rl_type(left_orig
);
404 if (sval_type_max(rl_type(left_orig
)).uvalue
< sval_type_max(rl_type(right_orig
)).uvalue
)
405 cast_type
= rl_type(right_orig
);
406 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
407 cast_type
= &int_ctype
;
409 min
= sval_type_min(cast_type
);
410 max
= sval_type_max(cast_type
);
411 left_orig
= cast_rl(cast_type
, left_orig
);
412 right_orig
= cast_rl(cast_type
, right_orig
);
414 switch (comparison
) {
416 case SPECIAL_UNSIGNED_LT
:
417 ret_rl
= remove_range(left_orig
, rl_max(right_orig
), max
);
420 case SPECIAL_UNSIGNED_LTE
:
421 if (!sval_is_max(rl_max(right_orig
)))
422 ret_rl
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
425 ret_rl
= rl_intersection(left_orig
, right_orig
);
428 case SPECIAL_UNSIGNED_GTE
:
429 if (!sval_is_min(rl_min(right_orig
)))
430 ret_rl
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
433 case SPECIAL_UNSIGNED_GT
:
434 ret_rl
= remove_range(left_orig
, min
, rl_min(right_orig
));
436 case SPECIAL_NOTEQUAL
:
437 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
438 ret_rl
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
441 sm_perror("unhandled comparison %s", show_special(comparison
));
445 *rl
= cast_rl(rl_type(*rl
), ret_rl
);
448 static struct range_list
*filter_by_comparison_call(const char *c
, struct expression
*call
, const char **endp
, struct range_list
*start_rl
)
451 struct expression
*arg
;
452 struct range_list
*casted_start
, *right_orig
;
455 /* For when we have a function that takes a function pointer. */
456 if (!call
|| call
->type
!= EXPR_CALL
)
459 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
))
462 if (!get_implied_rl(arg
, &right_orig
))
466 if (type_positive_bits(rl_type(start_rl
)) > type_positive_bits(type
))
467 type
= rl_type(start_rl
);
468 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
469 type
= rl_type(right_orig
);
471 casted_start
= cast_rl(type
, start_rl
);
472 right_orig
= cast_rl(type
, right_orig
);
474 filter_by_comparison(&casted_start
, comparison
, right_orig
);
475 return cast_rl(rl_type(start_rl
), casted_start
);
478 static sval_t
parse_val(int use_max
, struct expression
*call
, struct symbol
*type
, const char *c
, const char **endp
)
480 const char *start
= c
;
483 if (type
== &float_ctype
)
484 return sval_type_fval(type
, strtof(start
, (char **)endp
));
485 else if (type
== &double_ctype
)
486 return sval_type_fval(type
, strtod(start
, (char **)endp
));
487 else if (type
== &ldouble_ctype
)
488 return sval_type_fval(type
, strtold(start
, (char **)endp
));
490 if (!strncmp(start
, "max", 3)) {
491 ret
= sval_type_max(type
);
493 } else if (!strncmp(start
, "u64max", 6)) {
494 ret
= sval_type_val(type
, ULLONG_MAX
);
496 } else if (!strncmp(start
, "s64max", 6)) {
497 ret
= sval_type_val(type
, LLONG_MAX
);
499 } else if (!strncmp(start
, "u32max", 6)) {
500 ret
= sval_type_val(type
, UINT_MAX
);
502 } else if (!strncmp(start
, "s32max", 6)) {
503 ret
= sval_type_val(type
, INT_MAX
);
505 } else if (!strncmp(start
, "u16max", 6)) {
506 ret
= sval_type_val(type
, USHRT_MAX
);
508 } else if (!strncmp(start
, "s16max", 6)) {
509 ret
= sval_type_val(type
, SHRT_MAX
);
511 } else if (!strncmp(start
, "min", 3)) {
512 ret
= sval_type_min(type
);
514 } else if (!strncmp(start
, "s64min", 6)) {
515 ret
= sval_type_val(type
, LLONG_MIN
);
517 } else if (!strncmp(start
, "s32min", 6)) {
518 ret
= sval_type_val(type
, INT_MIN
);
520 } else if (!strncmp(start
, "s16min", 6)) {
521 ret
= sval_type_val(type
, SHRT_MIN
);
523 } else if (!strncmp(start
, "long_min", 8)) {
524 ret
= sval_type_val(type
, LONG_MIN
);
526 } else if (!strncmp(start
, "long_max", 8)) {
527 ret
= sval_type_val(type
, LONG_MAX
);
529 } else if (!strncmp(start
, "ulong_max", 9)) {
530 ret
= sval_type_val(type
, ULONG_MAX
);
532 } else if (!strncmp(start
, "ptr_max", 7)) {
533 ret
= sval_type_val(type
, valid_ptr_max
);
535 } else if (start
[0] == '[') {
536 /* this parses [==p0] comparisons */
537 get_val_from_key(1, type
, start
, call
, &c
, &ret
);
538 } else if (type_positive_bits(type
) == 64) {
539 ret
= sval_type_val(type
, strtoull(start
, (char **)&c
, 0));
541 ret
= sval_type_val(type
, strtoll(start
, (char **)&c
, 0));
547 static const char *jump_to_call_math(const char *value
)
549 const char *c
= value
;
551 while (*c
&& *c
!= '[')
557 if (*c
== '<' || *c
== '=' || *c
== '>' || *c
== '!')
563 static struct range_list
*get_param_return_rl(struct expression
*call
, const char *call_math
)
565 struct expression
*arg
;
569 param
= atoi(call_math
);
571 arg
= get_argument_from_call_expr(call
->args
, param
);
575 return db_return_vals_no_args(arg
);
578 static void str_to_rl_helper(struct expression
*call
, struct symbol
*type
, const char *str
, const char **endp
, struct range_list
**rl
)
580 struct range_list
*rl_tmp
= NULL
;
581 sval_t prev_min
, min
, max
;
584 prev_min
= sval_type_min(type
);
585 min
= sval_type_min(type
);
586 max
= sval_type_max(type
);
588 while (*c
!= '\0' && *c
!= '[') {
590 if (sval_cmp(min
, sval_type_min(type
)) != 0)
592 max
= sval_type_max(type
);
593 add_range_t(type
, &rl_tmp
, min
, max
);
598 min
= parse_val(0, call
, type
, c
, &c
);
599 if (!sval_fits(type
, min
))
600 min
= sval_type_min(type
);
604 if (*c
== '\0' || *c
== '[') {
605 add_range_t(type
, &rl_tmp
, min
, min
);
609 add_range_t(type
, &rl_tmp
, min
, min
);
615 max
= sval_type_max(type
);
616 add_range_t(type
, &rl_tmp
, min
, max
);
618 if (*c
== '[' || *c
== '\0')
622 sm_msg("debug XXX: trouble parsing %s c = %s", str
, c
);
628 max
= parse_val(1, call
, type
, c
, &c
);
629 if (!sval_fits(type
, max
))
630 max
= sval_type_max(type
);
632 max
= sval_type_max(type
);
633 add_range_t(type
, &rl_tmp
, min
, max
);
635 if (*c
== '[' || *c
== '\0')
639 add_range_t(type
, &rl_tmp
, min
, max
);
650 static void str_to_dinfo(struct expression
*call
, struct symbol
*type
, const char *value
, struct data_info
*dinfo
)
652 struct range_list
*math_rl
;
653 const char *call_math
;
655 struct range_list
*rl
= NULL
;
660 if (strcmp(value
, "empty") == 0)
663 if (strncmp(value
, "[==$", 4) == 0) {
664 struct expression
*arg
;
667 if (!str_to_comparison_arg(value
, call
, &comparison
, &arg
))
669 if (!get_implied_rl(arg
, &rl
))
674 str_to_rl_helper(call
, type
, value
, &c
, &rl
);
678 call_math
= jump_to_call_math(value
);
679 if (call_math
&& call_math
[0] == 'r') {
680 math_rl
= get_param_return_rl(call
, call_math
);
682 rl
= rl_intersection(rl
, math_rl
);
685 if (call_math
&& parse_call_math_rl(call
, call_math
, &math_rl
)) {
686 rl
= rl_intersection(rl
, math_rl
);
691 * For now if we already tried to handle the call math and couldn't
692 * figure it out then bail.
694 if (jump_to_call_math(c
) == c
+ 1)
697 rl
= filter_by_comparison_call(c
, call
, &c
, rl
);
700 rl
= cast_rl(type
, rl
);
701 dinfo
->value_ranges
= rl
;
704 static int rl_is_sane(struct range_list
*rl
)
706 struct data_range
*tmp
;
710 FOR_EACH_PTR(rl
, tmp
) {
711 if (!sval_fits(type
, tmp
->min
))
713 if (!sval_fits(type
, tmp
->max
))
715 if (sval_cmp(tmp
->min
, tmp
->max
) > 0)
717 } END_FOR_EACH_PTR(tmp
);
722 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
724 struct data_info dinfo
= {};
726 str_to_dinfo(NULL
, type
, value
, &dinfo
);
727 if (!rl_is_sane(dinfo
.value_ranges
))
728 dinfo
.value_ranges
= alloc_whole_rl(type
);
729 *rl
= dinfo
.value_ranges
;
732 void call_results_to_rl(struct expression
*expr
, struct symbol
*type
, const char *value
, struct range_list
**rl
)
734 struct data_info dinfo
= {};
736 str_to_dinfo(strip_expr(expr
), type
, value
, &dinfo
);
737 *rl
= dinfo
.value_ranges
;
740 int is_whole_rl(struct range_list
*rl
)
742 struct data_range
*drange
;
744 if (ptr_list_empty((struct ptr_list
*)rl
))
746 drange
= first_ptr_list((struct ptr_list
*)rl
);
747 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
752 int is_unknown_ptr(struct range_list
*rl
)
754 struct data_range
*drange
;
760 FOR_EACH_PTR(rl
, drange
) {
763 if (sval_cmp(drange
->min
, valid_ptr_min_sval
) == 0 &&
764 sval_cmp(drange
->max
, valid_ptr_max_sval
) == 0)
766 } END_FOR_EACH_PTR(drange
);
771 int is_whole_rl_non_zero(struct range_list
*rl
)
773 struct data_range
*drange
;
775 if (ptr_list_empty((struct ptr_list
*)rl
))
777 drange
= first_ptr_list((struct ptr_list
*)rl
);
778 if (sval_unsigned(drange
->min
) &&
779 drange
->min
.value
== 1 &&
780 sval_is_max(drange
->max
))
782 if (!sval_is_min(drange
->min
) || drange
->max
.value
!= -1)
784 drange
= last_ptr_list((struct ptr_list
*)rl
);
785 if (drange
->min
.value
!= 1 || !sval_is_max(drange
->max
))
790 sval_t
rl_min(struct range_list
*rl
)
792 struct data_range
*drange
;
795 ret
.type
= &llong_ctype
;
796 ret
.value
= LLONG_MIN
;
797 if (ptr_list_empty((struct ptr_list
*)rl
))
799 drange
= first_ptr_list((struct ptr_list
*)rl
);
803 sval_t
rl_max(struct range_list
*rl
)
805 struct data_range
*drange
;
808 ret
.type
= &llong_ctype
;
809 ret
.value
= LLONG_MAX
;
810 if (ptr_list_empty((struct ptr_list
*)rl
))
812 drange
= last_ptr_list((struct ptr_list
*)rl
);
816 int rl_to_sval(struct range_list
*rl
, sval_t
*sval
)
825 if (sval_cmp(min
, max
) != 0)
831 struct symbol
*rl_type(struct range_list
*rl
)
835 return rl_min(rl
).type
;
838 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
840 struct data_range
*ret
;
843 ret
= __alloc_perm_data_range(0);
845 ret
= __alloc_data_range(0);
851 struct data_range
*alloc_range(sval_t min
, sval_t max
)
853 return alloc_range_helper_sval(min
, max
, 0);
856 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
858 return alloc_range_helper_sval(min
, max
, 1);
861 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
863 struct range_list
*rl
= NULL
;
865 if (sval_cmp(min
, max
) > 0)
866 return alloc_whole_rl(min
.type
);
868 add_range(&rl
, min
, max
);
872 struct range_list
*alloc_whole_rl(struct symbol
*type
)
874 if (!type
|| type_positive_bits(type
) < 0)
876 if (type
->type
== SYM_ARRAY
)
879 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
882 static bool collapse_pointer_rl(struct range_list
**rl
, sval_t min
, sval_t max
)
884 struct range_list
*new_rl
= NULL
;
885 struct data_range
*tmp
;
891 * With the mtag work, then we end up getting huge lists of mtags.
892 * That seems cool, but the problem is that we can only store about
893 * 8-10 mtags in the DB before we truncate the list. Also the mtags
894 * aren't really used at all so it's a waste of resources for now...
895 * In the future, we maybe will revisit this code.
902 if (!type_is_ptr(min
.type
))
905 if (ptr_list_size((struct ptr_list
*)*rl
) < 8)
907 FOR_EACH_PTR(*rl
, tmp
) {
908 if (!is_err_ptr(tmp
->min
))
910 } END_FOR_EACH_PTR(tmp
);
914 FOR_EACH_PTR(*rl
, tmp
) {
915 if (sval_cmp(tmp
->min
, valid_ptr_min_sval
) >= 0 &&
916 sval_cmp(tmp
->max
, valid_ptr_max_sval
) <= 0)
917 add_range(&new_rl
, valid_ptr_min_sval
, valid_ptr_max_sval
);
919 add_range(&new_rl
, tmp
->min
, tmp
->max
);
920 } END_FOR_EACH_PTR(tmp
);
922 add_range(&new_rl
, min
, max
);
931 extern int rl_ptrlist_hack
;
932 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
934 struct data_range
*tmp
;
935 struct data_range
*new = NULL
;
939 * There is at least on valid reason why the types might be confusing
940 * and that's when you have a void pointer and on some paths you treat
941 * it as a u8 pointer and on other paths you treat it as a u16 pointer.
942 * This case is hard to deal with.
944 * There are other cases where we probably should be more specific about
945 * the types than we are. For example, we end up merging a lot of ulong
946 * with pointers and I have not figured out why we do that.
948 * But this hack works for both cases, I think. We cast it to pointers
949 * or we use the bigger size.
952 if (*list
&& rl_type(*list
) != min
.type
) {
953 if (rl_type(*list
)->type
== SYM_PTR
) {
954 min
= sval_cast(rl_type(*list
), min
);
955 max
= sval_cast(rl_type(*list
), max
);
956 } else if (min
.type
->type
== SYM_PTR
) {
957 *list
= cast_rl(min
.type
, *list
);
958 } else if (type_bits(rl_type(*list
)) >= type_bits(min
.type
)) {
959 min
= sval_cast(rl_type(*list
), min
);
960 max
= sval_cast(rl_type(*list
), max
);
962 *list
= cast_rl(min
.type
, *list
);
966 if (sval_cmp(min
, max
) > 0) {
967 min
= sval_type_min(min
.type
);
968 max
= sval_type_max(min
.type
);
971 if (collapse_pointer_rl(list
, min
, max
))
975 * FIXME: This has a problem merging a range_list like: min-0,3-max
976 * with a range like 1-2. You end up with min-2,3-max instead of
979 FOR_EACH_PTR(*list
, tmp
) {
981 /* Sometimes we overlap with more than one range
982 so we have to delete or modify the next range. */
983 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
984 /* join 2 ranges here */
986 DELETE_CURRENT_PTR(tmp
);
990 /* Doesn't overlap with the next one. */
991 if (sval_cmp(max
, tmp
->min
) < 0)
994 if (sval_cmp(max
, tmp
->max
) <= 0) {
995 /* Partially overlaps the next one. */
997 DELETE_CURRENT_PTR(tmp
);
1000 /* Completely overlaps the next one. */
1001 DELETE_CURRENT_PTR(tmp
);
1002 /* there could be more ranges to delete */
1006 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
1007 /* join 2 ranges into a big range */
1008 new = alloc_range(min
, tmp
->max
);
1009 REPLACE_CURRENT_PTR(tmp
, new);
1012 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
1013 new = alloc_range(min
, max
);
1014 INSERT_CURRENT(new, tmp
);
1017 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
1018 if (sval_cmp(max
, tmp
->max
) < 0)
1022 new = alloc_range(min
, max
);
1023 REPLACE_CURRENT_PTR(tmp
, new);
1028 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
1030 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
1032 new = alloc_range(min
, max
);
1033 REPLACE_CURRENT_PTR(tmp
, new);
1037 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
1038 /* join 2 ranges into a big range */
1039 new = alloc_range(tmp
->min
, max
);
1040 REPLACE_CURRENT_PTR(tmp
, new);
1044 /* the new range is entirely above the existing ranges */
1045 } END_FOR_EACH_PTR(tmp
);
1048 new = alloc_range(min
, max
);
1050 rl_ptrlist_hack
= 1;
1051 add_ptr_list(list
, new);
1052 rl_ptrlist_hack
= 0;
1055 struct range_list
*clone_rl(struct range_list
*list
)
1057 struct data_range
*tmp
;
1058 struct range_list
*ret
= NULL
;
1060 FOR_EACH_PTR(list
, tmp
) {
1061 add_ptr_list(&ret
, tmp
);
1062 } END_FOR_EACH_PTR(tmp
);
1066 struct range_list
*clone_rl_permanent(struct range_list
*list
)
1068 struct data_range
*tmp
;
1069 struct data_range
*new;
1070 struct range_list
*ret
= NULL
;
1072 FOR_EACH_PTR(list
, tmp
) {
1073 new = alloc_range_perm(tmp
->min
, tmp
->max
);
1074 add_ptr_list(&ret
, new);
1075 } END_FOR_EACH_PTR(tmp
);
1079 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
1081 struct data_range
*tmp
;
1082 struct range_list
*ret
= NULL
;
1084 FOR_EACH_PTR(one
, tmp
) {
1085 add_range(&ret
, tmp
->min
, tmp
->max
);
1086 } END_FOR_EACH_PTR(tmp
);
1087 FOR_EACH_PTR(two
, tmp
) {
1088 add_range(&ret
, tmp
->min
, tmp
->max
);
1089 } END_FOR_EACH_PTR(tmp
);
1093 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
1095 struct data_range
*tmp
;
1096 struct range_list
*ret
= NULL
;
1101 min
= sval_cast(rl_type(list
), min
);
1102 max
= sval_cast(rl_type(list
), max
);
1103 if (sval_cmp(min
, max
) > 0) {
1109 FOR_EACH_PTR(list
, tmp
) {
1110 if (sval_cmp(tmp
->max
, min
) < 0) {
1111 add_range(&ret
, tmp
->min
, tmp
->max
);
1114 if (sval_cmp(tmp
->min
, max
) > 0) {
1115 add_range(&ret
, tmp
->min
, tmp
->max
);
1118 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
1120 if (sval_cmp(tmp
->min
, min
) >= 0) {
1122 add_range(&ret
, max
, tmp
->max
);
1123 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
1125 add_range(&ret
, tmp
->min
, min
);
1129 add_range(&ret
, tmp
->min
, min
);
1130 add_range(&ret
, max
, tmp
->max
);
1132 } END_FOR_EACH_PTR(tmp
);
1136 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
1142 if (sval_cmp(one
->min
, two
->min
) != 0)
1144 if (sval_cmp(one
->max
, two
->max
) != 0)
1149 int rl_equiv(struct range_list
*one
, struct range_list
*two
)
1151 struct data_range
*one_range
;
1152 struct data_range
*two_range
;
1157 PREPARE_PTR_LIST(one
, one_range
);
1158 PREPARE_PTR_LIST(two
, two_range
);
1160 if (!one_range
&& !two_range
)
1162 if (!ranges_equiv(one_range
, two_range
))
1164 NEXT_PTR_LIST(one_range
);
1165 NEXT_PTR_LIST(two_range
);
1167 FINISH_PTR_LIST(two_range
);
1168 FINISH_PTR_LIST(one_range
);
1173 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
1175 switch (comparison
) {
1177 case SPECIAL_UNSIGNED_LT
:
1178 if (sval_cmp(left
->min
, right
->max
) < 0)
1181 case SPECIAL_UNSIGNED_LTE
:
1183 if (sval_cmp(left
->min
, right
->max
) <= 0)
1187 if (sval_cmp(left
->max
, right
->min
) < 0)
1189 if (sval_cmp(left
->min
, right
->max
) > 0)
1192 case SPECIAL_UNSIGNED_GTE
:
1194 if (sval_cmp(left
->max
, right
->min
) >= 0)
1198 case SPECIAL_UNSIGNED_GT
:
1199 if (sval_cmp(left
->max
, right
->min
) > 0)
1202 case SPECIAL_NOTEQUAL
:
1203 if (sval_cmp(left
->min
, left
->max
) != 0)
1205 if (sval_cmp(right
->min
, right
->max
) != 0)
1207 if (sval_cmp(left
->min
, right
->min
) != 0)
1211 sm_perror("unhandled comparison %d", comparison
);
/*
 * Dispatch to true_comparison_range() with the variable placed on the
 * side of the comparison indicated by @left (non-zero: variable is the
 * left operand).
 * NOTE(review): the branch on @left was reconstructed from the two
 * surviving return statements — verify against upstream smatch.
 */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}
1225 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
1227 switch (comparison
) {
1229 case SPECIAL_UNSIGNED_LT
:
1230 if (sval_cmp(left
->max
, right
->min
) >= 0)
1233 case SPECIAL_UNSIGNED_LTE
:
1235 if (sval_cmp(left
->max
, right
->min
) > 0)
1239 if (sval_cmp(left
->min
, left
->max
) != 0)
1241 if (sval_cmp(right
->min
, right
->max
) != 0)
1243 if (sval_cmp(left
->min
, right
->min
) != 0)
1246 case SPECIAL_UNSIGNED_GTE
:
1248 if (sval_cmp(left
->min
, right
->max
) < 0)
1252 case SPECIAL_UNSIGNED_GT
:
1253 if (sval_cmp(left
->min
, right
->max
) <= 0)
1256 case SPECIAL_NOTEQUAL
:
1257 if (sval_cmp(left
->max
, right
->min
) < 0)
1259 if (sval_cmp(left
->min
, right
->max
) > 0)
1263 sm_perror("unhandled comparison %d", comparison
);
/*
 * Dispatch to false_comparison_range_sval() with the variable on the
 * side indicated by @left (non-zero: variable is the left operand).
 * NOTE(review): the branch on @left was reconstructed from the two
 * surviving return statements — verify against upstream smatch.
 */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}
1277 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
1279 struct range_list
*rl_left
, *rl_right
;
1280 struct data_range
*tmp_left
, *tmp_right
;
1281 struct symbol
*type
;
1283 if (comparison
== UNKNOWN_COMPARISON
)
1285 if (!get_implied_rl(left
, &rl_left
))
1287 if (!get_implied_rl(right
, &rl_right
))
1290 type
= rl_type(rl_left
);
1291 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1292 type
= rl_type(rl_right
);
1293 if (type_positive_bits(type
) < 31)
1296 rl_left
= cast_rl(type
, rl_left
);
1297 rl_right
= cast_rl(type
, rl_right
);
1299 FOR_EACH_PTR(rl_left
, tmp_left
) {
1300 FOR_EACH_PTR(rl_right
, tmp_right
) {
1301 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
1303 } END_FOR_EACH_PTR(tmp_right
);
1304 } END_FOR_EACH_PTR(tmp_left
);
1308 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
1310 struct range_list
*rl_left
, *rl_right
;
1311 struct data_range
*tmp_left
, *tmp_right
;
1312 struct symbol
*type
;
1314 if (!get_implied_rl(left
, &rl_left
))
1316 if (!get_implied_rl(right
, &rl_right
))
1319 type
= rl_type(rl_left
);
1320 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1321 type
= rl_type(rl_right
);
1322 if (type_positive_bits(type
) < 31)
1325 rl_left
= cast_rl(type
, rl_left
);
1326 rl_right
= cast_rl(type
, rl_right
);
1328 FOR_EACH_PTR(rl_left
, tmp_left
) {
1329 FOR_EACH_PTR(rl_right
, tmp_right
) {
1330 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
1332 } END_FOR_EACH_PTR(tmp_right
);
1333 } END_FOR_EACH_PTR(tmp_left
);
1337 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1339 struct data_range
*left_tmp
, *right_tmp
;
1340 struct symbol
*type
;
1342 if (!left_ranges
|| !right_ranges
|| comparison
== UNKNOWN_COMPARISON
)
1345 type
= rl_type(left_ranges
);
1346 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1347 type
= rl_type(right_ranges
);
1348 if (type_positive_bits(type
) < 31)
1351 left_ranges
= cast_rl(type
, left_ranges
);
1352 right_ranges
= cast_rl(type
, right_ranges
);
1354 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1355 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1356 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
1358 } END_FOR_EACH_PTR(right_tmp
);
1359 } END_FOR_EACH_PTR(left_tmp
);
1363 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1365 struct data_range
*left_tmp
, *right_tmp
;
1366 struct symbol
*type
;
1368 if (!left_ranges
|| !right_ranges
|| comparison
== UNKNOWN_COMPARISON
)
1371 type
= rl_type(left_ranges
);
1372 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1373 type
= rl_type(right_ranges
);
1374 if (type_positive_bits(type
) < 31)
1377 left_ranges
= cast_rl(type
, left_ranges
);
1378 right_ranges
= cast_rl(type
, right_ranges
);
1380 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1381 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1382 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
1384 } END_FOR_EACH_PTR(right_tmp
);
1385 } END_FOR_EACH_PTR(left_tmp
);
1389 /* FIXME: the _rl here stands for right left so really it should be _lr */
/*
 * Dispatch to possibly_true_rl() with @a/@b ordered by @left
 * (non-zero: @a is the left operand of the comparison).
 * NOTE(review): the branch on @left was reconstructed from the two
 * surviving return statements — verify against upstream smatch.
 */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}
/*
 * Dispatch to possibly_false_rl() with @a/@b ordered by @left
 * (non-zero: @a is the left operand of the comparison).
 * NOTE(review): the branch on @left was reconstructed from the two
 * surviving return statements — verify against upstream smatch.
 */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}
1406 int rl_has_sval(struct range_list
*rl
, sval_t sval
)
1408 struct data_range
*tmp
;
1410 FOR_EACH_PTR(rl
, tmp
) {
1411 if (sval_cmp(tmp
->min
, sval
) <= 0 &&
1412 sval_cmp(tmp
->max
, sval
) >= 0)
1414 } END_FOR_EACH_PTR(tmp
);
/*
 * Append @drange to the end of @list unconditionally — no overlap
 * merging or ordering, in contrast with add_range().
 */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/* Push @rl onto the range-list stack. */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/* Pop and return the top range list from the stack. */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;	/* NOTE(review): return reconstructed — verify upstream */
}
/* Return the top of the range-list stack without popping it. */
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;	/* NOTE(review): return reconstructed — verify upstream */
}
/*
 * Replace the top of the range-list stack with a version that has
 * every range in @filter removed.
 */
void filter_top_rl(struct range_list_stack **rl_stack, struct range_list *filter)
{
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = rl_filter(rl, filter);
	push_rl(rl_stack, rl);
}
1454 struct range_list
*rl_truncate_cast(struct symbol
*type
, struct range_list
*rl
)
1456 struct data_range
*tmp
;
1457 struct range_list
*ret
= NULL
;
1463 if (!type
|| type
== rl_type(rl
))
1466 FOR_EACH_PTR(rl
, tmp
) {
1469 if (type_bits(type
) < type_bits(rl_type(rl
))) {
1470 min
.uvalue
= tmp
->min
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1471 max
.uvalue
= tmp
->max
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1473 if (sval_cmp(min
, max
) > 0) {
1474 min
= sval_cast(type
, min
);
1475 max
= sval_cast(type
, max
);
1477 add_range_t(type
, &ret
, min
, max
);
1478 } END_FOR_EACH_PTR(tmp
);
/*
 * rl_fits_in_type() - return 1 if every value in @rl is representable in
 * @type without truncation, 0 otherwise.
 */
int rl_fits_in_type(struct range_list *rl, struct symbol *type)
{
	/* a type at least as wide as the list's type always fits */
	if (type_bits(rl_type(rl)) <= type_bits(type))
		return 1;
	if (sval_cmp(rl_max(rl), sval_type_max(type)) > 0)
		return 0;
	if (sval_is_negative(rl_min(rl)) &&
	    sval_cmp(rl_min(rl), sval_type_min(type)) < 0)
		return 0;
	return 1;
}
1495 static int rl_type_consistent(struct range_list
*rl
)
1497 struct data_range
*tmp
;
1498 struct symbol
*type
;
1501 FOR_EACH_PTR(rl
, tmp
) {
1502 if (type
!= tmp
->min
.type
|| type
!= tmp
->max
.type
)
1504 } END_FOR_EACH_PTR(tmp
);
1508 static struct range_list
*cast_to_bool(struct range_list
*rl
)
1510 struct data_range
*tmp
;
1511 struct range_list
*ret
= NULL
;
1514 sval_t min
= { .type
= &bool_ctype
};
1515 sval_t max
= { .type
= &bool_ctype
};
1517 FOR_EACH_PTR(rl
, tmp
) {
1518 if (tmp
->min
.value
|| tmp
->max
.value
)
1520 if (sval_is_negative(tmp
->min
) &&
1521 sval_is_negative(tmp
->max
))
1523 if (tmp
->min
.value
== 0 ||
1524 tmp
->max
.value
== 0)
1526 if (sval_is_negative(tmp
->min
) &&
1529 } END_FOR_EACH_PTR(tmp
);
1536 add_range(&ret
, min
, max
);
1540 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
1542 struct data_range
*tmp
;
1543 struct range_list
*ret
= NULL
;
1550 if (!rl_is_sane(rl
))
1551 return alloc_whole_rl(type
);
1552 if (type
== rl_type(rl
) && rl_type_consistent(rl
))
1555 if (type
== &bool_ctype
)
1556 return cast_to_bool(rl
);
1558 FOR_EACH_PTR(rl
, tmp
) {
1559 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
1560 } END_FOR_EACH_PTR(tmp
);
1563 return alloc_whole_rl(type
);
1568 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
1570 struct data_range
*tmp
;
1572 FOR_EACH_PTR(filter
, tmp
) {
1573 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
1574 } END_FOR_EACH_PTR(tmp
);
1579 struct range_list
*do_intersection(struct range_list
*one_rl
, struct range_list
*two_rl
)
1581 struct data_range
*one
, *two
;
1582 struct range_list
*ret
= NULL
;
1585 PREPARE_PTR_LIST(one_rl
, one
);
1586 PREPARE_PTR_LIST(two_rl
, two
);
1591 if (sval_cmp(one
->max
, two
->min
) < 0) {
1595 if (sval_cmp(one
->min
, two
->min
) < 0 && sval_cmp(one
->max
, two
->max
) <= 0) {
1596 add_range(&ret
, two
->min
, one
->max
);
1600 if (sval_cmp(one
->min
, two
->min
) >= 0 && sval_cmp(one
->max
, two
->max
) <= 0) {
1601 add_range(&ret
, one
->min
, one
->max
);
1605 if (sval_cmp(one
->min
, two
->min
) < 0 && sval_cmp(one
->max
, two
->max
) > 0) {
1606 add_range(&ret
, two
->min
, two
->max
);
1610 if (sval_cmp(one
->min
, two
->max
) <= 0 && sval_cmp(one
->max
, two
->max
) > 0) {
1611 add_range(&ret
, one
->min
, two
->max
);
1615 if (sval_cmp(one
->min
, two
->max
) <= 0) {
1616 sm_fatal("error calculating intersection of '%s' and '%s'", show_rl(one_rl
), show_rl(two_rl
));
1622 FINISH_PTR_LIST(two
);
1623 FINISH_PTR_LIST(one
);
1628 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
1630 struct range_list
*ret
;
1631 struct symbol
*ret_type
;
1632 struct symbol
*small_type
;
1633 struct symbol
*large_type
;
1638 ret_type
= rl_type(one
);
1639 small_type
= rl_type(one
);
1640 large_type
= rl_type(two
);
1642 if (type_bits(rl_type(two
)) < type_bits(small_type
)) {
1643 small_type
= rl_type(two
);
1644 large_type
= rl_type(one
);
1647 one
= cast_rl(large_type
, one
);
1648 two
= cast_rl(large_type
, two
);
1650 ret
= do_intersection(one
, two
);
1651 return cast_rl(ret_type
, ret
);
1654 static struct range_list
*handle_mod_rl(struct range_list
*left
, struct range_list
*right
)
1659 max
= rl_max(right
);
1660 if (sval_is_max(max
))
1665 if (sval_is_negative(max
))
1667 if (sval_cmp(rl_max(left
), max
) < 0)
1671 return alloc_rl(zero
, max
);
1674 static struct range_list
*get_neg_rl(struct range_list
*rl
)
1676 struct data_range
*tmp
;
1677 struct data_range
*new;
1678 struct range_list
*ret
= NULL
;
1682 if (sval_is_positive(rl_min(rl
)))
1685 FOR_EACH_PTR(rl
, tmp
) {
1686 if (sval_is_positive(tmp
->min
))
1688 if (sval_is_positive(tmp
->max
)) {
1689 new = alloc_range(tmp
->min
, tmp
->max
);
1690 new->max
.value
= -1;
1691 add_range(&ret
, new->min
, new->max
);
1694 add_range(&ret
, tmp
->min
, tmp
->max
);
1695 } END_FOR_EACH_PTR(tmp
);
1700 static struct range_list
*get_pos_rl(struct range_list
*rl
)
1702 struct data_range
*tmp
;
1703 struct data_range
*new;
1704 struct range_list
*ret
= NULL
;
1708 if (sval_is_negative(rl_max(rl
)))
1711 FOR_EACH_PTR(rl
, tmp
) {
1712 if (sval_is_negative(tmp
->max
))
1714 if (sval_is_positive(tmp
->min
)) {
1715 add_range(&ret
, tmp
->min
, tmp
->max
);
1718 new = alloc_range(tmp
->min
, tmp
->max
);
1720 add_range(&ret
, new->min
, new->max
);
1721 } END_FOR_EACH_PTR(tmp
);
1726 static struct range_list
*divide_rl_helper(struct range_list
*left
, struct range_list
*right
)
1728 sval_t right_min
, right_max
;
1731 if (!left
|| !right
)
1734 /* let's assume we never divide by zero */
1735 right_min
= rl_min(right
);
1736 right_max
= rl_max(right
);
1737 if (right_min
.value
== 0 && right_max
.value
== 0)
1739 if (right_min
.value
== 0)
1740 right_min
.value
= 1;
1741 if (right_max
.value
== 0)
1742 right_max
.value
= -1;
1744 max
= sval_binop(rl_max(left
), '/', right_min
);
1745 min
= sval_binop(rl_min(left
), '/', right_max
);
1747 return alloc_rl(min
, max
);
1750 static struct range_list
*handle_divide_rl(struct range_list
*left
, struct range_list
*right
)
1752 struct range_list
*left_neg
, *left_pos
, *right_neg
, *right_pos
;
1753 struct range_list
*neg_neg
, *neg_pos
, *pos_neg
, *pos_pos
;
1754 struct range_list
*ret
;
1756 if (is_whole_rl(right
))
1759 left_neg
= get_neg_rl(left
);
1760 left_pos
= get_pos_rl(left
);
1761 right_neg
= get_neg_rl(right
);
1762 right_pos
= get_pos_rl(right
);
1764 neg_neg
= divide_rl_helper(left_neg
, right_neg
);
1765 neg_pos
= divide_rl_helper(left_neg
, right_pos
);
1766 pos_neg
= divide_rl_helper(left_pos
, right_neg
);
1767 pos_pos
= divide_rl_helper(left_pos
, right_pos
);
1769 ret
= rl_union(neg_neg
, neg_pos
);
1770 ret
= rl_union(ret
, pos_neg
);
1771 return rl_union(ret
, pos_pos
);
1774 static struct range_list
*ptr_add_mult(struct range_list
*left
, int op
, struct range_list
*right
)
1776 struct range_list
*ret
;
1777 sval_t l_sval
, r_sval
, res
;
1780 * This function is sort of the wrong API because it takes two pointer
1781 * and adds them together. The caller is expected to figure out
1782 * alignment. Neither of those are the correct things to do.
1784 * Really this function is quite bogus...
1787 if (rl_to_sval(left
, &l_sval
) && rl_to_sval(right
, &r_sval
)) {
1788 res
= sval_binop(l_sval
, op
, r_sval
);
1789 return alloc_rl(res
, res
);
1792 if (rl_min(left
).value
!= 0 || rl_max(right
).value
!= 0) {
1793 ret
= alloc_rl(valid_ptr_min_sval
, valid_ptr_max_sval
);
1794 return cast_rl(rl_type(left
), ret
);
1797 return alloc_whole_rl(rl_type(left
));
1800 static struct range_list
*handle_add_mult_rl(struct range_list
*left
, int op
, struct range_list
*right
)
1804 if (type_is_ptr(rl_type(left
)) || type_is_ptr(rl_type(right
)))
1805 return ptr_add_mult(left
, op
, right
);
1807 if (sval_binop_overflows(rl_min(left
), op
, rl_min(right
)))
1809 min
= sval_binop(rl_min(left
), op
, rl_min(right
));
1811 if (sval_binop_overflows(rl_max(left
), op
, rl_max(right
)))
1813 max
= sval_binop(rl_max(left
), op
, rl_max(right
));
1815 return alloc_rl(min
, max
);
1818 static struct range_list
*handle_sub_rl(struct range_list
*left_orig
, struct range_list
*right_orig
)
1820 struct range_list
*left_rl
, *right_rl
;
1821 struct symbol
*type
;
1823 sval_t min_ll
, max_ll
, res_ll
;
1826 /* TODO: These things should totally be using dranges where possible */
1828 if (!left_orig
|| !right_orig
)
1832 if (type_positive_bits(rl_type(left_orig
)) > type_positive_bits(type
))
1833 type
= rl_type(left_orig
);
1834 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
1835 type
= rl_type(right_orig
);
1837 left_rl
= cast_rl(type
, left_orig
);
1838 right_rl
= cast_rl(type
, right_orig
);
1840 max
= rl_max(left_rl
);
1841 min
= sval_type_min(type
);
1843 min_ll
= rl_min(left_rl
);
1844 min_ll
.type
= &llong_ctype
;
1845 max_ll
= rl_max(right_rl
);
1846 max_ll
.type
= &llong_ctype
;
1848 res_ll
.value
= min_ll
.value
- max_ll
.value
;
1850 if (!sval_binop_overflows(rl_min(left_rl
), '-', rl_max(right_rl
))) {
1851 tmp
= sval_binop(rl_min(left_rl
), '-', rl_max(right_rl
));
1852 if (sval_cmp(tmp
, min
) > 0)
1854 } else if (type_positive_bits(type
) < 63 &&
1855 !sval_binop_overflows(min_ll
, '-', max_ll
) &&
1856 (min
.value
!= 0 && sval_cmp(res_ll
, min
) >= 0)) {
1857 struct range_list
*left_casted
, *right_casted
, *result
;
1859 left_casted
= cast_rl(&llong_ctype
, left_orig
);
1860 right_casted
= cast_rl(&llong_ctype
, right_orig
);
1861 result
= handle_sub_rl(left_casted
, right_casted
);
1862 return cast_rl(type
, result
);
1865 if (!sval_is_max(rl_max(left_rl
))) {
1866 tmp
= sval_binop(rl_max(left_rl
), '-', rl_min(right_rl
));
1867 if (sval_cmp(tmp
, max
) < 0)
1871 if (sval_is_min(min
) && sval_is_max(max
))
1874 return alloc_rl(min
, max
);
/*
 * rl_bits_always_set() - approximate the bits guaranteed set in every value
 * of @rl via a mask derived from the minimum value.
 */
static unsigned long long rl_bits_always_set(struct range_list *rl)
{
	return sval_fls_mask(rl_min(rl));
}
/*
 * rl_bits_maybe_set() - approximate the bits that could be set in some
 * value of @rl via a mask derived from the maximum value.
 */
static unsigned long long rl_bits_maybe_set(struct range_list *rl)
{
	return sval_fls_mask(rl_max(rl));
}
1887 static struct range_list
*handle_OR_rl(struct range_list
*left
, struct range_list
*right
)
1889 unsigned long long left_min
, left_max
, right_min
, right_max
;
1893 if ((rl_to_sval(left
, &sval
) || rl_to_sval(right
, &sval
)) &&
1894 !sval_binop_overflows(rl_max(left
), '+', rl_max(right
)))
1895 return rl_binop(left
, '+', right
);
1897 left_min
= rl_bits_always_set(left
);
1898 left_max
= rl_bits_maybe_set(left
);
1899 right_min
= rl_bits_always_set(right
);
1900 right_max
= rl_bits_maybe_set(right
);
1902 min
.type
= max
.type
= &ullong_ctype
;
1903 min
.uvalue
= left_min
| right_min
;
1904 max
.uvalue
= left_max
| right_max
;
1906 return cast_rl(rl_type(left
), alloc_rl(min
, max
));
1909 static struct range_list
*handle_XOR_rl(struct range_list
*left
, struct range_list
*right
)
1911 unsigned long long left_set
, left_maybe
;
1912 unsigned long long right_set
, right_maybe
;
1915 left_set
= rl_bits_always_set(left
);
1916 left_maybe
= rl_bits_maybe_set(left
);
1918 right_set
= rl_bits_always_set(right
);
1919 right_maybe
= rl_bits_maybe_set(right
);
1921 zero
= max
= rl_min(left
);
1923 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1925 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1928 static sval_t
sval_lowest_set_bit(sval_t sval
)
1930 sval_t ret
= { .type
= sval
.type
};
1933 for (i
= 0; i
< 64; i
++) {
1934 if (sval
.uvalue
& 1ULL << i
) {
1935 ret
.uvalue
= (1ULL << i
);
1942 static struct range_list
*handle_AND_rl(struct range_list
*left
, struct range_list
*right
)
1944 struct bit_info
*one
, *two
;
1945 struct range_list
*rl
;
1946 sval_t min
, max
, zero
, bits_sval
;
1947 unsigned long long bits
;
1949 one
= rl_to_binfo(left
);
1950 two
= rl_to_binfo(right
);
1951 bits
= one
->possible
& two
->possible
;
1952 bits_sval
= rl_max(left
);
1953 bits_sval
.uvalue
= bits
;
1955 max
= sval_min_nonneg(rl_max(left
), rl_max(right
));
1956 min
= sval_lowest_set_bit(bits_sval
);
1958 rl
= alloc_rl(min
, max
);
1962 add_range(&rl
, zero
, zero
);
1967 static struct range_list
*handle_lshift(struct range_list
*left_orig
, struct range_list
*right_orig
)
1969 struct range_list
*left
;
1970 struct data_range
*tmp
;
1971 struct range_list
*ret
= NULL
;
1972 sval_t zero
= { .type
= rl_type(left_orig
), };
1973 sval_t shift
, min
, max
;
1974 bool add_zero
= false;
1976 if (!rl_to_sval(right_orig
, &shift
) || sval_is_negative(shift
))
1978 if (shift
.value
== 0)
1981 /* Cast to unsigned for easier left shift math */
1982 if (type_positive_bits(rl_type(left_orig
)) < 32)
1983 left
= cast_rl(&uint_ctype
, left_orig
);
1984 else if(type_positive_bits(rl_type(left_orig
)) == 63)
1985 left
= cast_rl(&ullong_ctype
, left_orig
);
1989 FOR_EACH_PTR(left
, tmp
) {
1993 if (min
.value
== 0 || max
.value
> sval_type_max(max
.type
).uvalue
>> shift
.uvalue
)
1995 if (min
.value
== 0 && max
.value
== 0)
1999 min
= sval_binop(min
, SPECIAL_LEFTSHIFT
, shift
);
2000 max
= sval_binop(max
, SPECIAL_LEFTSHIFT
, shift
);
2001 add_range(&ret
, min
, max
);
2002 } END_FOR_EACH_PTR(tmp
);
2004 if (!rl_fits_in_type(ret
, rl_type(left_orig
)))
2006 ret
= cast_rl(rl_type(left_orig
), ret
);
2008 add_range(&ret
, zero
, zero
);
2013 static struct range_list
*handle_rshift(struct range_list
*left_orig
, struct range_list
*right_orig
)
2015 struct data_range
*tmp
;
2016 struct range_list
*ret
= NULL
;
2017 sval_t shift
, min
, max
;
2019 if (!rl_to_sval(right_orig
, &shift
) || sval_is_negative(shift
))
2021 if (shift
.value
== 0)
2024 FOR_EACH_PTR(left_orig
, tmp
) {
2025 min
= sval_binop(tmp
->min
, SPECIAL_RIGHTSHIFT
, shift
);
2026 max
= sval_binop(tmp
->max
, SPECIAL_RIGHTSHIFT
, shift
);
2027 add_range(&ret
, min
, max
);
2028 } END_FOR_EACH_PTR(tmp
);
2033 struct range_list
*rl_binop(struct range_list
*left
, int op
, struct range_list
*right
)
2035 struct symbol
*cast_type
;
2036 sval_t left_sval
, right_sval
;
2037 struct range_list
*ret
= NULL
;
2039 cast_type
= rl_type(left
);
2040 if (sval_type_max(rl_type(left
)).uvalue
< sval_type_max(rl_type(right
)).uvalue
)
2041 cast_type
= rl_type(right
);
2042 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
2043 cast_type
= &int_ctype
;
2045 left
= cast_rl(cast_type
, left
);
2046 right
= cast_rl(cast_type
, right
);
2048 if (!left
&& !right
)
2051 if (rl_to_sval(left
, &left_sval
) && rl_to_sval(right
, &right_sval
)) {
2052 sval_t val
= sval_binop(left_sval
, op
, right_sval
);
2053 return alloc_rl(val
, val
);
2058 ret
= handle_mod_rl(left
, right
);
2061 ret
= handle_divide_rl(left
, right
);
2065 ret
= handle_add_mult_rl(left
, op
, right
);
2068 ret
= handle_OR_rl(left
, right
);
2071 ret
= handle_XOR_rl(left
, right
);
2074 ret
= handle_AND_rl(left
, right
);
2077 ret
= handle_sub_rl(left
, right
);
2079 case SPECIAL_RIGHTSHIFT
:
2080 return handle_rshift(left
, right
);
2081 case SPECIAL_LEFTSHIFT
:
2082 return handle_lshift(left
, right
);
2088 void free_data_info_allocs(void)
2090 struct allocator_struct
*desc
= &data_info_allocator
;
2091 struct allocation_blob
*blob
= desc
->blobs
;
2097 desc
->allocations
= 0;
2098 desc
->total_bytes
= 0;
2099 desc
->useful_bytes
= 0;
2100 desc
->freelist
= NULL
;
2102 struct allocation_blob
*next
= blob
->next
;
2103 blob_free(blob
, desc
->chunking
);
2106 clear_type_value_cache();
2107 clear_data_range_alloc();
2110 void split_comparison_rl(struct range_list
*left_orig
, int op
, struct range_list
*right_orig
,
2111 struct range_list
**left_true_rl
, struct range_list
**left_false_rl
,
2112 struct range_list
**right_true_rl
, struct range_list
**right_false_rl
)
2114 struct range_list
*left_true
, *left_false
;
2115 struct range_list
*right_true
, *right_false
;
2118 min
= sval_type_min(rl_type(left_orig
));
2119 max
= sval_type_max(rl_type(left_orig
));
2121 left_true
= clone_rl(left_orig
);
2122 left_false
= clone_rl(left_orig
);
2123 right_true
= clone_rl(right_orig
);
2124 right_false
= clone_rl(right_orig
);
2128 case SPECIAL_UNSIGNED_LT
:
2129 left_true
= remove_range(left_orig
, rl_max(right_orig
), max
);
2130 if (!sval_is_min(rl_min(right_orig
))) {
2131 left_false
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
2134 right_true
= remove_range(right_orig
, min
, rl_min(left_orig
));
2135 if (!sval_is_max(rl_max(left_orig
)))
2136 right_false
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
2138 case SPECIAL_UNSIGNED_LTE
:
2140 if (!sval_is_max(rl_max(right_orig
)))
2141 left_true
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
2142 left_false
= remove_range(left_orig
, min
, rl_min(right_orig
));
2144 if (!sval_is_min(rl_min(left_orig
)))
2145 right_true
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
2146 right_false
= remove_range(right_orig
, rl_max(left_orig
), max
);
2148 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
2149 left_false
= remove_range(left_false
, rl_min(left_orig
), rl_min(left_orig
));
2150 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
2151 right_false
= remove_range(right_false
, rl_max(left_orig
), rl_max(left_orig
));
2154 left_true
= rl_intersection(left_orig
, right_orig
);
2155 right_true
= clone_rl(left_true
);
2157 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
2158 left_false
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
2159 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
2160 right_false
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
2162 case SPECIAL_UNSIGNED_GTE
:
2164 if (!sval_is_min(rl_min(right_orig
)))
2165 left_true
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
2166 left_false
= remove_range(left_orig
, rl_max(right_orig
), max
);
2168 if (!sval_is_max(rl_max(left_orig
)))
2169 right_true
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
2170 right_false
= remove_range(right_orig
, min
, rl_min(left_orig
));
2172 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
2173 right_false
= remove_range(right_false
, rl_min(left_orig
), rl_min(left_orig
));
2174 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
2175 left_false
= remove_range(left_false
, rl_max(left_orig
), rl_max(left_orig
));
2178 case SPECIAL_UNSIGNED_GT
:
2179 left_true
= remove_range(left_orig
, min
, rl_min(right_orig
));
2180 if (!sval_is_max(rl_max(right_orig
)))
2181 left_false
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
2183 right_true
= remove_range(right_orig
, rl_max(left_orig
), max
);
2184 if (!sval_is_min(rl_min(left_orig
)))
2185 right_false
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
2187 case SPECIAL_NOTEQUAL
:
2188 left_false
= rl_intersection(left_orig
, right_orig
);
2189 right_false
= clone_rl(left_false
);
2191 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
2192 left_true
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
2193 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
2194 right_true
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
2197 sm_perror(" unhandled comparison %d", op
);
2201 *left_true_rl
= left_true
;
2202 *left_false_rl
= left_false
;
2204 if (right_true_rl
) {
2205 *right_true_rl
= right_true
;
2206 *right_false_rl
= right_false
;