/*
 * Copyright (C) 2009 Dan Carpenter.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
 */
20 #include "smatch_extra.h"
21 #include "smatch_slist.h"
23 ALLOCATOR(data_info
, "smatch extra data");
24 ALLOCATOR(data_range
, "data range");
25 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
26 "permanent ranges", perm_data_range
);
27 __DECLARE_ALLOCATOR(struct ptr_list
, rl_ptrlist
);
29 static int handle_error_pointers(char *full
, size_t size
, struct data_range
*drange
)
32 * The kernel has error pointers where you do essentially:
34 * return (void *)(unsigned long)-12;
36 * But what I want here is to print -12 instead of the unsigned version
41 if (option_project
!= PROJ_KERNEL
)
43 if (!type_is_ptr(drange
->min
.type
))
45 if (drange
->min
.uvalue
< -4095ULL)
48 if (drange
->min
.value
== drange
->max
.value
)
49 snprintf(full
+ strlen(full
), size
- strlen(full
), "(%lld)", drange
->min
.value
);
51 snprintf(full
+ strlen(full
), size
- strlen(full
), "(%lld)-(%lld)", drange
->min
.value
, drange
->max
.value
);
55 char *show_rl(struct range_list
*list
)
57 struct data_range
*tmp
;
62 full
[sizeof(full
) - 1] = '\0';
63 FOR_EACH_PTR(list
, tmp
) {
65 strncat(full
, ",", sizeof(full
) - 1 - strlen(full
));
66 if (handle_error_pointers(full
, sizeof(full
) - 1, tmp
))
68 if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
69 strncat(full
, sval_to_str(tmp
->min
), sizeof(full
) - 1 - strlen(full
));
72 strncat(full
, sval_to_str(tmp
->min
), sizeof(full
) - 1 - strlen(full
));
73 strncat(full
, "-", sizeof(full
) - 1 - strlen(full
));
74 strncat(full
, sval_to_str(tmp
->max
), sizeof(full
) - 1 - strlen(full
));
75 } END_FOR_EACH_PTR(tmp
);
76 if (strlen(full
) == sizeof(full
) - 1)
77 full
[sizeof(full
) - 2] = '+';
78 return alloc_sname(full
);
/* Free all the short-lived range-list allocations in one sweep. */
void free_all_rl(void)
{
	clear_rl_ptrlist_alloc();
}
86 static int sval_too_big(struct symbol
*type
, sval_t sval
)
88 if (type_bits(type
) >= 32 &&
89 type_bits(sval
.type
) <= type_bits(type
))
91 if (sval
.uvalue
<= ((1ULL << type_bits(type
)) - 1))
93 if (type_signed(sval
.type
)) {
94 if (type_unsigned(type
)) {
95 unsigned long long neg
= ~sval
.uvalue
;
96 if (neg
<= sval_type_max(type
).uvalue
)
99 if (sval
.value
< sval_type_min(type
).value
)
101 if (sval
.value
> sval_type_max(type
).value
)
105 if (sval
.uvalue
> sval_type_max(type
).uvalue
)
110 static int truncates_nicely(struct symbol
*type
, sval_t min
, sval_t max
)
112 unsigned long long mask
;
113 int bits
= type_bits(type
);
115 if (bits
>= type_bits(min
.type
))
118 mask
= (-1ULL >> (64 - bits
)) << (64 - bits
);
119 return (min
.uvalue
& mask
) == (max
.uvalue
& mask
);
122 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
124 /* If we're just adding a number, cast it and add it */
125 if (sval_cmp(min
, max
) == 0) {
126 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
130 /* If the range is within the type range then add it */
131 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
132 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
136 if (truncates_nicely(type
, min
, max
)) {
137 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
142 * If the range we are adding has more bits than the range type then
143 * add the whole range type. Eg:
144 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
147 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
148 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
152 /* Cast negative values to high positive values */
153 if (sval_is_negative(min
) && type_unsigned(type
)) {
154 if (sval_is_positive(max
)) {
155 if (sval_too_high(type
, max
)) {
156 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
159 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
160 max
= sval_type_max(type
);
162 max
= sval_cast(type
, max
);
164 min
= sval_cast(type
, min
);
165 add_range(rl
, min
, max
);
168 /* Cast high positive numbers to negative */
169 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
170 if (!sval_is_negative(sval_cast(type
, min
))) {
171 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
172 min
= sval_type_min(type
);
174 min
= sval_cast(type
, min
);
176 max
= sval_cast(type
, max
);
177 add_range(rl
, min
, max
);
180 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
184 static int str_to_comparison_arg_helper(const char *str
,
185 struct expression
*call
, int *comparison
,
186 struct expression
**arg
, const char **endp
)
198 *comparison
= SPECIAL_LTE
;
203 } else if (*c
== '=') {
206 *comparison
= SPECIAL_EQUAL
;
207 } else if (*c
== '>') {
210 *comparison
= SPECIAL_GTE
;
215 } else if (*c
== '!') {
218 *comparison
= SPECIAL_NOTEQUAL
;
227 param
= strtoll(c
, (char **)&c
, 10);
229 c
++; /* skip the ']' character */
235 *arg
= get_argument_from_call_expr(call
->args
, param
);
238 if (*c
== '-' && *(c
+ 1) == '>') {
242 n
= snprintf(buf
, sizeof(buf
), "$%s", c
);
243 if (n
>= sizeof(buf
))
245 if (buf
[n
- 1] == ']')
247 *arg
= gen_expression_from_key(*arg
, buf
);
248 while (*c
&& *c
!= ']')
/*
 * Find the '[' that begins a comparison annotation in "str" and parse
 * it.  Returns 0 when the string holds no comparison.
 * NOTE(review): the forward scan is reconstructed — the original loop
 * body is not visible in this extract; confirm against upstream.
 */
int str_to_comparison_arg(const char *str, struct expression *call, int *comparison, struct expression **arg)
{
	while (1) {
		if (*str == '\0')
			return 0;
		if (*str == '[')
			break;
		str++;
	}
	return str_to_comparison_arg_helper(str, call, comparison, arg, NULL);
}
266 static int get_val_from_key(int use_max
, struct symbol
*type
, const char *c
, struct expression
*call
, const char **endp
, sval_t
*sval
)
268 struct expression
*arg
;
273 ret
= sval_type_max(type
);
275 ret
= sval_type_min(type
);
277 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
)) {
282 if (use_max
&& get_implied_max(arg
, &tmp
)) {
284 if (comparison
== '<') {
286 ret
= sval_binop(ret
, '-', tmp
);
289 if (!use_max
&& get_implied_min(arg
, &tmp
)) {
291 if (comparison
== '>') {
293 ret
= sval_binop(ret
, '+', tmp
);
301 static sval_t
add_one(sval_t sval
)
307 static sval_t
sub_one(sval_t sval
)
313 void filter_by_comparison(struct range_list
**rl
, int comparison
, struct range_list
*right
)
315 struct range_list
*left_orig
= *rl
;
316 struct range_list
*right_orig
= right
;
317 struct range_list
*ret_rl
= *rl
;
318 struct symbol
*cast_type
;
321 cast_type
= rl_type(left_orig
);
322 if (sval_type_max(rl_type(left_orig
)).uvalue
< sval_type_max(rl_type(right_orig
)).uvalue
)
323 cast_type
= rl_type(right_orig
);
324 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
325 cast_type
= &int_ctype
;
327 min
= sval_type_min(cast_type
);
328 max
= sval_type_max(cast_type
);
329 left_orig
= cast_rl(cast_type
, left_orig
);
330 right_orig
= cast_rl(cast_type
, right_orig
);
332 switch (comparison
) {
334 case SPECIAL_UNSIGNED_LT
:
335 ret_rl
= remove_range(left_orig
, rl_max(right_orig
), max
);
338 case SPECIAL_UNSIGNED_LTE
:
339 if (!sval_is_max(rl_max(right_orig
)))
340 ret_rl
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
343 ret_rl
= rl_intersection(left_orig
, right_orig
);
346 case SPECIAL_UNSIGNED_GTE
:
347 if (!sval_is_min(rl_min(right_orig
)))
348 ret_rl
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
351 case SPECIAL_UNSIGNED_GT
:
352 ret_rl
= remove_range(left_orig
, min
, rl_min(right_orig
));
354 case SPECIAL_NOTEQUAL
:
355 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
356 ret_rl
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
359 sm_perror("unhandled comparison %s", show_special(comparison
));
363 *rl
= cast_rl(rl_type(*rl
), ret_rl
);
366 static struct range_list
*filter_by_comparison_call(const char *c
, struct expression
*call
, const char **endp
, struct range_list
*start_rl
)
369 struct expression
*arg
;
370 struct range_list
*casted_start
, *right_orig
;
373 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
))
376 if (!get_implied_rl(arg
, &right_orig
))
380 if (type_positive_bits(rl_type(start_rl
)) > type_positive_bits(type
))
381 type
= rl_type(start_rl
);
382 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
383 type
= rl_type(right_orig
);
385 casted_start
= cast_rl(type
, start_rl
);
386 right_orig
= cast_rl(type
, right_orig
);
388 filter_by_comparison(&casted_start
, comparison
, right_orig
);
389 return cast_rl(rl_type(start_rl
), casted_start
);
392 static sval_t
parse_val(int use_max
, struct expression
*call
, struct symbol
*type
, const char *c
, const char **endp
)
394 const char *start
= c
;
397 if (!strncmp(start
, "max", 3)) {
398 ret
= sval_type_max(type
);
400 } else if (!strncmp(start
, "u64max", 6)) {
401 ret
= sval_type_val(type
, ULLONG_MAX
);
403 } else if (!strncmp(start
, "s64max", 6)) {
404 ret
= sval_type_val(type
, LLONG_MAX
);
406 } else if (!strncmp(start
, "u32max", 6)) {
407 ret
= sval_type_val(type
, UINT_MAX
);
409 } else if (!strncmp(start
, "s32max", 6)) {
410 ret
= sval_type_val(type
, INT_MAX
);
412 } else if (!strncmp(start
, "u16max", 6)) {
413 ret
= sval_type_val(type
, USHRT_MAX
);
415 } else if (!strncmp(start
, "s16max", 6)) {
416 ret
= sval_type_val(type
, SHRT_MAX
);
418 } else if (!strncmp(start
, "min", 3)) {
419 ret
= sval_type_min(type
);
421 } else if (!strncmp(start
, "s64min", 6)) {
422 ret
= sval_type_val(type
, LLONG_MIN
);
424 } else if (!strncmp(start
, "s32min", 6)) {
425 ret
= sval_type_val(type
, INT_MIN
);
427 } else if (!strncmp(start
, "s16min", 6)) {
428 ret
= sval_type_val(type
, SHRT_MIN
);
430 } else if (!strncmp(start
, "long_min", 8)) {
431 ret
= sval_type_val(type
, LONG_MIN
);
433 } else if (!strncmp(start
, "long_max", 8)) {
434 ret
= sval_type_val(type
, LONG_MAX
);
436 } else if (!strncmp(start
, "ulong_max", 9)) {
437 ret
= sval_type_val(type
, ULONG_MAX
);
439 } else if (!strncmp(start
, "ptr_max", 7)) {
440 ret
= sval_type_val(type
, valid_ptr_max
);
442 } else if (start
[0] == '[') {
443 /* this parses [==p0] comparisons */
444 get_val_from_key(1, type
, start
, call
, &c
, &ret
);
445 } else if (type_positive_bits(type
) == 64) {
446 ret
= sval_type_val(type
, strtoull(start
, (char **)&c
, 0));
448 ret
= sval_type_val(type
, strtoll(start
, (char **)&c
, 0));
/*
 * Skip to the call-math part of a database value string (just past the
 * '[').  Returns NULL when there is none, or when the bracket holds a
 * comparison rather than call math.
 */
static const char *jump_to_call_math(const char *value)
{
	const char *c = value;

	while (*c && *c != '[')
		c++;

	if (!*c)
		return NULL;
	c++;
	if (*c == '<' || *c == '=' || *c == '>' || *c == '!')
		return NULL;

	return c;
}
470 static void str_to_rl_helper(struct expression
*call
, struct symbol
*type
, const char *str
, const char **endp
, struct range_list
**rl
)
472 struct range_list
*rl_tmp
= NULL
;
473 sval_t prev_min
, min
, max
;
476 prev_min
= sval_type_min(type
);
477 min
= sval_type_min(type
);
478 max
= sval_type_max(type
);
480 while (*c
!= '\0' && *c
!= '[') {
482 if (sval_cmp(min
, sval_type_min(type
)) != 0)
484 max
= sval_type_max(type
);
485 add_range_t(type
, &rl_tmp
, min
, max
);
490 min
= parse_val(0, call
, type
, c
, &c
);
491 if (!sval_fits(type
, min
))
492 min
= sval_type_min(type
);
496 if (*c
== '\0' || *c
== '[') {
497 add_range_t(type
, &rl_tmp
, min
, min
);
501 add_range_t(type
, &rl_tmp
, min
, min
);
507 max
= sval_type_max(type
);
508 add_range_t(type
, &rl_tmp
, min
, max
);
510 if (*c
== '[' || *c
== '\0')
514 sm_msg("debug XXX: trouble parsing %s c = %s", str
, c
);
520 max
= parse_val(1, call
, type
, c
, &c
);
521 if (!sval_fits(type
, max
))
522 max
= sval_type_max(type
);
524 max
= sval_type_max(type
);
525 add_range_t(type
, &rl_tmp
, min
, max
);
527 if (*c
== '[' || *c
== '\0')
531 add_range_t(type
, &rl_tmp
, min
, max
);
542 static void str_to_dinfo(struct expression
*call
, struct symbol
*type
, const char *value
, struct data_info
*dinfo
)
544 struct range_list
*math_rl
;
545 const char *call_math
;
547 struct range_list
*rl
= NULL
;
552 if (strcmp(value
, "empty") == 0)
555 if (strncmp(value
, "[==$", 4) == 0) {
556 struct expression
*arg
;
559 if (!str_to_comparison_arg(value
, call
, &comparison
, &arg
))
561 if (!get_implied_rl(arg
, &rl
))
566 str_to_rl_helper(call
, type
, value
, &c
, &rl
);
570 call_math
= jump_to_call_math(value
);
571 if (call_math
&& parse_call_math_rl(call
, call_math
, &math_rl
)) {
572 rl
= rl_intersection(rl
, math_rl
);
577 * For now if we already tried to handle the call math and couldn't
578 * figure it out then bail.
580 if (jump_to_call_math(c
) == c
+ 1)
583 rl
= filter_by_comparison_call(c
, call
, &c
, rl
);
586 rl
= cast_rl(type
, rl
);
587 dinfo
->value_ranges
= rl
;
590 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
592 struct data_info dinfo
= {};
594 str_to_dinfo(NULL
, type
, value
, &dinfo
);
595 *rl
= dinfo
.value_ranges
;
598 void call_results_to_rl(struct expression
*expr
, struct symbol
*type
, const char *value
, struct range_list
**rl
)
600 struct data_info dinfo
= {};
602 str_to_dinfo(strip_expr(expr
), type
, value
, &dinfo
);
603 *rl
= dinfo
.value_ranges
;
606 int is_whole_rl(struct range_list
*rl
)
608 struct data_range
*drange
;
610 if (ptr_list_empty(rl
))
612 drange
= first_ptr_list((struct ptr_list
*)rl
);
613 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
618 int is_unknown_ptr(struct range_list
*rl
)
620 struct data_range
*drange
;
626 FOR_EACH_PTR(rl
, drange
) {
629 if (sval_cmp(drange
->min
, valid_ptr_min_sval
) == 0 &&
630 sval_cmp(drange
->max
, valid_ptr_max_sval
) == 0)
632 } END_FOR_EACH_PTR(drange
);
637 int is_whole_rl_non_zero(struct range_list
*rl
)
639 struct data_range
*drange
;
641 if (ptr_list_empty(rl
))
643 drange
= first_ptr_list((struct ptr_list
*)rl
);
644 if (sval_unsigned(drange
->min
) &&
645 drange
->min
.value
== 1 &&
646 sval_is_max(drange
->max
))
648 if (!sval_is_min(drange
->min
) || drange
->max
.value
!= -1)
650 drange
= last_ptr_list((struct ptr_list
*)rl
);
651 if (drange
->min
.value
!= 1 || !sval_is_max(drange
->max
))
656 sval_t
rl_min(struct range_list
*rl
)
658 struct data_range
*drange
;
661 ret
.type
= &llong_ctype
;
662 ret
.value
= LLONG_MIN
;
663 if (ptr_list_empty(rl
))
665 drange
= first_ptr_list((struct ptr_list
*)rl
);
669 sval_t
rl_max(struct range_list
*rl
)
671 struct data_range
*drange
;
674 ret
.type
= &llong_ctype
;
675 ret
.value
= LLONG_MAX
;
676 if (ptr_list_empty(rl
))
678 drange
= last_ptr_list((struct ptr_list
*)rl
);
682 int rl_to_sval(struct range_list
*rl
, sval_t
*sval
)
691 if (sval_cmp(min
, max
) != 0)
697 struct symbol
*rl_type(struct range_list
*rl
)
701 return rl_min(rl
).type
;
704 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
706 struct data_range
*ret
;
709 ret
= __alloc_perm_data_range(0);
711 ret
= __alloc_data_range(0);
717 struct data_range
*alloc_range(sval_t min
, sval_t max
)
719 return alloc_range_helper_sval(min
, max
, 0);
722 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
724 return alloc_range_helper_sval(min
, max
, 1);
727 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
729 struct range_list
*rl
= NULL
;
731 if (sval_cmp(min
, max
) > 0)
732 return alloc_whole_rl(min
.type
);
734 add_range(&rl
, min
, max
);
738 struct range_list
*alloc_whole_rl(struct symbol
*type
)
740 if (!type
|| type_positive_bits(type
) < 0)
742 if (type
->type
== SYM_ARRAY
)
745 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
748 extern int rl_ptrlist_hack
;
749 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
751 struct data_range
*tmp
;
752 struct data_range
*new = NULL
;
756 * There is at least on valid reason why the types might be confusing
757 * and that's when you have a void pointer and on some paths you treat
758 * it as a u8 pointer and on other paths you treat it as a u16 pointer.
759 * This case is hard to deal with.
761 * There are other cases where we probably should be more specific about
762 * the types than we are. For example, we end up merging a lot of ulong
763 * with pointers and I have not figured out why we do that.
765 * But this hack works for both cases, I think. We cast it to pointers
766 * or we use the bigger size.
769 if (*list
&& rl_type(*list
) != min
.type
) {
770 if (rl_type(*list
)->type
== SYM_PTR
) {
771 min
= sval_cast(rl_type(*list
), min
);
772 max
= sval_cast(rl_type(*list
), max
);
773 } else if (min
.type
->type
== SYM_PTR
) {
774 *list
= cast_rl(min
.type
, *list
);
775 } else if (type_bits(rl_type(*list
)) >= type_bits(min
.type
)) {
776 min
= sval_cast(rl_type(*list
), min
);
777 max
= sval_cast(rl_type(*list
), max
);
779 *list
= cast_rl(min
.type
, *list
);
783 if (sval_cmp(min
, max
) > 0) {
784 min
= sval_type_min(min
.type
);
785 max
= sval_type_max(min
.type
);
789 * FIXME: This has a problem merging a range_list like: min-0,3-max
790 * with a range like 1-2. You end up with min-2,3-max instead of
793 FOR_EACH_PTR(*list
, tmp
) {
795 /* Sometimes we overlap with more than one range
796 so we have to delete or modify the next range. */
797 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
798 /* join 2 ranges here */
800 DELETE_CURRENT_PTR(tmp
);
804 /* Doesn't overlap with the next one. */
805 if (sval_cmp(max
, tmp
->min
) < 0)
808 if (sval_cmp(max
, tmp
->max
) <= 0) {
809 /* Partially overlaps the next one. */
811 DELETE_CURRENT_PTR(tmp
);
814 /* Completely overlaps the next one. */
815 DELETE_CURRENT_PTR(tmp
);
816 /* there could be more ranges to delete */
820 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
821 /* join 2 ranges into a big range */
822 new = alloc_range(min
, tmp
->max
);
823 REPLACE_CURRENT_PTR(tmp
, new);
826 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
827 new = alloc_range(min
, max
);
828 INSERT_CURRENT(new, tmp
);
831 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
832 if (sval_cmp(max
, tmp
->max
) < 0)
836 new = alloc_range(min
, max
);
837 REPLACE_CURRENT_PTR(tmp
, new);
842 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
844 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
846 new = alloc_range(min
, max
);
847 REPLACE_CURRENT_PTR(tmp
, new);
851 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
852 /* join 2 ranges into a big range */
853 new = alloc_range(tmp
->min
, max
);
854 REPLACE_CURRENT_PTR(tmp
, new);
858 /* the new range is entirely above the existing ranges */
859 } END_FOR_EACH_PTR(tmp
);
862 new = alloc_range(min
, max
);
865 add_ptr_list(list
, new);
869 struct range_list
*clone_rl(struct range_list
*list
)
871 struct data_range
*tmp
;
872 struct range_list
*ret
= NULL
;
874 FOR_EACH_PTR(list
, tmp
) {
875 add_ptr_list(&ret
, tmp
);
876 } END_FOR_EACH_PTR(tmp
);
880 struct range_list
*clone_rl_permanent(struct range_list
*list
)
882 struct data_range
*tmp
;
883 struct data_range
*new;
884 struct range_list
*ret
= NULL
;
886 FOR_EACH_PTR(list
, tmp
) {
887 new = alloc_range_perm(tmp
->min
, tmp
->max
);
888 add_ptr_list(&ret
, new);
889 } END_FOR_EACH_PTR(tmp
);
893 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
895 struct data_range
*tmp
;
896 struct range_list
*ret
= NULL
;
898 FOR_EACH_PTR(one
, tmp
) {
899 add_range(&ret
, tmp
->min
, tmp
->max
);
900 } END_FOR_EACH_PTR(tmp
);
901 FOR_EACH_PTR(two
, tmp
) {
902 add_range(&ret
, tmp
->min
, tmp
->max
);
903 } END_FOR_EACH_PTR(tmp
);
907 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
909 struct data_range
*tmp
;
910 struct range_list
*ret
= NULL
;
915 min
= sval_cast(rl_type(list
), min
);
916 max
= sval_cast(rl_type(list
), max
);
917 if (sval_cmp(min
, max
) > 0) {
923 FOR_EACH_PTR(list
, tmp
) {
924 if (sval_cmp(tmp
->max
, min
) < 0) {
925 add_range(&ret
, tmp
->min
, tmp
->max
);
928 if (sval_cmp(tmp
->min
, max
) > 0) {
929 add_range(&ret
, tmp
->min
, tmp
->max
);
932 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
934 if (sval_cmp(tmp
->min
, min
) >= 0) {
936 add_range(&ret
, max
, tmp
->max
);
937 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
939 add_range(&ret
, tmp
->min
, min
);
943 add_range(&ret
, tmp
->min
, min
);
944 add_range(&ret
, max
, tmp
->max
);
946 } END_FOR_EACH_PTR(tmp
);
950 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
956 if (sval_cmp(one
->min
, two
->min
) != 0)
958 if (sval_cmp(one
->max
, two
->max
) != 0)
/* True when both lists contain exactly the same ranges, in order. */
int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;
	int ret = 1;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		/* both exhausted at the same time: equal */
		if (!one_range && !two_range)
			break;
		if (!ranges_equiv(one_range, two_range)) {
			ret = 0;
			break;
		}
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);

	return ret;
}
987 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
989 switch (comparison
) {
991 case SPECIAL_UNSIGNED_LT
:
992 if (sval_cmp(left
->min
, right
->max
) < 0)
995 case SPECIAL_UNSIGNED_LTE
:
997 if (sval_cmp(left
->min
, right
->max
) <= 0)
1001 if (sval_cmp(left
->max
, right
->min
) < 0)
1003 if (sval_cmp(left
->min
, right
->max
) > 0)
1006 case SPECIAL_UNSIGNED_GTE
:
1008 if (sval_cmp(left
->max
, right
->min
) >= 0)
1012 case SPECIAL_UNSIGNED_GT
:
1013 if (sval_cmp(left
->max
, right
->min
) > 0)
1016 case SPECIAL_NOTEQUAL
:
1017 if (sval_cmp(left
->min
, left
->max
) != 0)
1019 if (sval_cmp(right
->min
, right
->max
) != 0)
1021 if (sval_cmp(left
->min
, right
->min
) != 0)
1025 sm_perror("unhandled comparison %d", comparison
);
/* Same as true_comparison_range() but "left" selects which side "var" is on. */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}
1039 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
1041 switch (comparison
) {
1043 case SPECIAL_UNSIGNED_LT
:
1044 if (sval_cmp(left
->max
, right
->min
) >= 0)
1047 case SPECIAL_UNSIGNED_LTE
:
1049 if (sval_cmp(left
->max
, right
->min
) > 0)
1053 if (sval_cmp(left
->min
, left
->max
) != 0)
1055 if (sval_cmp(right
->min
, right
->max
) != 0)
1057 if (sval_cmp(left
->min
, right
->min
) != 0)
1060 case SPECIAL_UNSIGNED_GTE
:
1062 if (sval_cmp(left
->min
, right
->max
) < 0)
1066 case SPECIAL_UNSIGNED_GT
:
1067 if (sval_cmp(left
->min
, right
->max
) <= 0)
1070 case SPECIAL_NOTEQUAL
:
1071 if (sval_cmp(left
->max
, right
->min
) < 0)
1073 if (sval_cmp(left
->min
, right
->max
) > 0)
1077 sm_perror("unhandled comparison %d", comparison
);
/* Same as false_comparison_range_sval() but "left" selects the side of "var". */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}
1091 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
1093 struct range_list
*rl_left
, *rl_right
;
1094 struct data_range
*tmp_left
, *tmp_right
;
1095 struct symbol
*type
;
1097 if (!get_implied_rl(left
, &rl_left
))
1099 if (!get_implied_rl(right
, &rl_right
))
1102 type
= rl_type(rl_left
);
1103 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1104 type
= rl_type(rl_right
);
1105 if (type_positive_bits(type
) < 31)
1108 rl_left
= cast_rl(type
, rl_left
);
1109 rl_right
= cast_rl(type
, rl_right
);
1111 FOR_EACH_PTR(rl_left
, tmp_left
) {
1112 FOR_EACH_PTR(rl_right
, tmp_right
) {
1113 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
1115 } END_FOR_EACH_PTR(tmp_right
);
1116 } END_FOR_EACH_PTR(tmp_left
);
1120 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
1122 struct range_list
*rl_left
, *rl_right
;
1123 struct data_range
*tmp_left
, *tmp_right
;
1124 struct symbol
*type
;
1126 if (!get_implied_rl(left
, &rl_left
))
1128 if (!get_implied_rl(right
, &rl_right
))
1131 type
= rl_type(rl_left
);
1132 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1133 type
= rl_type(rl_right
);
1134 if (type_positive_bits(type
) < 31)
1137 rl_left
= cast_rl(type
, rl_left
);
1138 rl_right
= cast_rl(type
, rl_right
);
1140 FOR_EACH_PTR(rl_left
, tmp_left
) {
1141 FOR_EACH_PTR(rl_right
, tmp_right
) {
1142 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
1144 } END_FOR_EACH_PTR(tmp_right
);
1145 } END_FOR_EACH_PTR(tmp_left
);
1149 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1151 struct data_range
*left_tmp
, *right_tmp
;
1152 struct symbol
*type
;
1154 if (!left_ranges
|| !right_ranges
)
1157 type
= rl_type(left_ranges
);
1158 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1159 type
= rl_type(right_ranges
);
1160 if (type_positive_bits(type
) < 31)
1163 left_ranges
= cast_rl(type
, left_ranges
);
1164 right_ranges
= cast_rl(type
, right_ranges
);
1166 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1167 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1168 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
1170 } END_FOR_EACH_PTR(right_tmp
);
1171 } END_FOR_EACH_PTR(left_tmp
);
1175 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1177 struct data_range
*left_tmp
, *right_tmp
;
1178 struct symbol
*type
;
1180 if (!left_ranges
|| !right_ranges
)
1183 type
= rl_type(left_ranges
);
1184 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1185 type
= rl_type(right_ranges
);
1186 if (type_positive_bits(type
) < 31)
1189 left_ranges
= cast_rl(type
, left_ranges
);
1190 right_ranges
= cast_rl(type
, right_ranges
);
1192 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1193 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1194 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
1196 } END_FOR_EACH_PTR(right_tmp
);
1197 } END_FOR_EACH_PTR(left_tmp
);
/* FIXME: the _rl here stands for right left so really it should be _lr */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}
/* Orientation-aware wrapper around possibly_false_rl(). */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}
1218 int rl_has_sval(struct range_list
*rl
, sval_t sval
)
1220 struct data_range
*tmp
;
1222 FOR_EACH_PTR(rl
, tmp
) {
1223 if (sval_cmp(tmp
->min
, sval
) <= 0 &&
1224 sval_cmp(tmp
->max
, sval
) >= 0)
1226 } END_FOR_EACH_PTR(tmp
);
/* Append "drange" to *list without any merging or sorting. */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/* Push "rl" onto the range-list stack. */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/* Pop and return the top of the range-list stack. */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}
/* Return the top of the range-list stack without popping it. */
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}
/* Remove every value in "filter" from the range list on top of the stack. */
void filter_top_rl(struct range_list_stack **rl_stack, struct range_list *filter)
{
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = rl_filter(rl, filter);
	push_rl(rl_stack, rl);
}
1266 struct range_list
*rl_truncate_cast(struct symbol
*type
, struct range_list
*rl
)
1268 struct data_range
*tmp
;
1269 struct range_list
*ret
= NULL
;
1275 if (!type
|| type
== rl_type(rl
))
1278 FOR_EACH_PTR(rl
, tmp
) {
1281 if (type_bits(type
) < type_bits(rl_type(rl
))) {
1282 min
.uvalue
= tmp
->min
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1283 max
.uvalue
= tmp
->max
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1285 if (sval_cmp(min
, max
) > 0) {
1286 min
= sval_cast(type
, min
);
1287 max
= sval_cast(type
, max
);
1289 add_range_t(type
, &ret
, min
, max
);
1290 } END_FOR_EACH_PTR(tmp
);
1295 static int rl_is_sane(struct range_list
*rl
)
1297 struct data_range
*tmp
;
1298 struct symbol
*type
;
1301 FOR_EACH_PTR(rl
, tmp
) {
1302 if (!sval_fits(type
, tmp
->min
))
1304 if (!sval_fits(type
, tmp
->max
))
1306 if (sval_cmp(tmp
->min
, tmp
->max
) > 0)
1308 } END_FOR_EACH_PTR(tmp
);
1313 static int rl_type_consistent(struct range_list
*rl
)
1315 struct data_range
*tmp
;
1316 struct symbol
*type
;
1319 FOR_EACH_PTR(rl
, tmp
) {
1320 if (type
!= tmp
->min
.type
|| type
!= tmp
->max
.type
)
1322 } END_FOR_EACH_PTR(tmp
);
1326 static struct range_list
*cast_to_bool(struct range_list
*rl
)
1328 struct data_range
*tmp
;
1329 struct range_list
*ret
= NULL
;
1332 sval_t min
= { .type
= &bool_ctype
};
1333 sval_t max
= { .type
= &bool_ctype
};
1335 FOR_EACH_PTR(rl
, tmp
) {
1336 if (tmp
->min
.value
|| tmp
->max
.value
)
1338 if (sval_is_negative(tmp
->min
) &&
1339 sval_is_negative(tmp
->max
))
1341 if (tmp
->min
.value
== 0 ||
1342 tmp
->max
.value
== 0)
1344 if (sval_is_negative(tmp
->min
) &&
1347 } END_FOR_EACH_PTR(tmp
);
1354 add_range(&ret
, min
, max
);
1358 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
1360 struct data_range
*tmp
;
1361 struct range_list
*ret
= NULL
;
1368 if (!rl_is_sane(rl
))
1369 return alloc_whole_rl(type
);
1370 if (type
== rl_type(rl
) && rl_type_consistent(rl
))
1373 if (type
== &bool_ctype
)
1374 return cast_to_bool(rl
);
1376 FOR_EACH_PTR(rl
, tmp
) {
1377 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
1378 } END_FOR_EACH_PTR(tmp
);
1381 return alloc_whole_rl(type
);
1386 struct range_list
*rl_invert(struct range_list
*orig
)
1388 struct range_list
*ret
= NULL
;
1389 struct data_range
*tmp
;
1390 sval_t gap_min
, abs_max
, sval
;
1394 if (type_bits(rl_type(orig
)) < 0) /* void type mostly */
1397 gap_min
= sval_type_min(rl_min(orig
).type
);
1398 abs_max
= sval_type_max(rl_max(orig
).type
);
1400 FOR_EACH_PTR(orig
, tmp
) {
1401 if (sval_cmp(tmp
->min
, gap_min
) > 0) {
1402 sval
= sval_type_val(tmp
->min
.type
, tmp
->min
.value
- 1);
1403 add_range(&ret
, gap_min
, sval
);
1405 if (sval_cmp(tmp
->max
, abs_max
) == 0)
1407 gap_min
= sval_type_val(tmp
->max
.type
, tmp
->max
.value
+ 1);
1408 } END_FOR_EACH_PTR(tmp
);
1410 if (sval_cmp(gap_min
, abs_max
) <= 0)
1411 add_range(&ret
, gap_min
, abs_max
);
1416 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
1418 struct data_range
*tmp
;
1420 FOR_EACH_PTR(filter
, tmp
) {
1421 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
1422 } END_FOR_EACH_PTR(tmp
);
/*
 * Intersection of two range lists, computed as
 * one \ complement(one) \ complement(two), done once in the larger of
 * the two types and once in the smaller to catch values that only wrap
 * in one of them.  The result is cast back to one's type.
 */
struct range_list *rl_intersection(struct range_list *one, struct range_list *two)
{
	struct range_list *one_orig;
	struct range_list *two_orig;
	struct range_list *ret;
	struct symbol *ret_type;
	struct symbol *small_type;
	struct symbol *large_type;

	/* NOTE(review): NULL guards not visible in this extract — confirm */
	if (!one || !two)
		return NULL;

	one_orig = one;
	two_orig = two;

	ret_type = rl_type(one);
	small_type = rl_type(one);
	large_type = rl_type(two);

	if (type_bits(rl_type(two)) < type_bits(small_type)) {
		small_type = rl_type(two);
		large_type = rl_type(one);
	}

	/* first pass: intersect in the larger type */
	one = cast_rl(large_type, one);
	two = cast_rl(large_type, two);

	ret = one;
	one = rl_invert(one);
	two = rl_invert(two);

	ret = rl_filter(ret, one);
	ret = rl_filter(ret, two);

	/* second pass: repeat in the smaller type */
	one = cast_rl(small_type, one_orig);
	two = cast_rl(small_type, two_orig);

	one = rl_invert(one);
	two = rl_invert(two);

	ret = cast_rl(small_type, ret);
	ret = rl_filter(ret, one);
	ret = rl_filter(ret, two);

	return cast_rl(ret_type, ret);
}
1476 static struct range_list
*handle_mod_rl(struct range_list
*left
, struct range_list
*right
)
1481 max
= rl_max(right
);
1482 if (sval_is_max(max
))
1487 if (sval_is_negative(max
))
1489 if (sval_cmp(rl_max(left
), max
) < 0)
1493 return alloc_rl(zero
, max
);
1496 static struct range_list
*get_neg_rl(struct range_list
*rl
)
1498 struct data_range
*tmp
;
1499 struct data_range
*new;
1500 struct range_list
*ret
= NULL
;
1504 if (sval_is_positive(rl_min(rl
)))
1507 FOR_EACH_PTR(rl
, tmp
) {
1508 if (sval_is_positive(tmp
->min
))
1510 if (sval_is_positive(tmp
->max
)) {
1511 new = alloc_range(tmp
->min
, tmp
->max
);
1512 new->max
.value
= -1;
1513 add_range(&ret
, new->min
, new->max
);
1516 add_range(&ret
, tmp
->min
, tmp
->max
);
1517 } END_FOR_EACH_PTR(tmp
);
1522 static struct range_list
*get_pos_rl(struct range_list
*rl
)
1524 struct data_range
*tmp
;
1525 struct data_range
*new;
1526 struct range_list
*ret
= NULL
;
1530 if (sval_is_negative(rl_max(rl
)))
1533 FOR_EACH_PTR(rl
, tmp
) {
1534 if (sval_is_negative(tmp
->max
))
1536 if (sval_is_positive(tmp
->min
)) {
1537 add_range(&ret
, tmp
->min
, tmp
->max
);
1540 new = alloc_range(tmp
->min
, tmp
->max
);
1542 add_range(&ret
, new->min
, new->max
);
1543 } END_FOR_EACH_PTR(tmp
);
1548 static struct range_list
*divide_rl_helper(struct range_list
*left
, struct range_list
*right
)
1550 sval_t right_min
, right_max
;
1553 if (!left
|| !right
)
1556 /* let's assume we never divide by zero */
1557 right_min
= rl_min(right
);
1558 right_max
= rl_max(right
);
1559 if (right_min
.value
== 0 && right_max
.value
== 0)
1561 if (right_min
.value
== 0)
1562 right_min
.value
= 1;
1563 if (right_max
.value
== 0)
1564 right_max
.value
= -1;
1566 max
= sval_binop(rl_max(left
), '/', right_min
);
1567 min
= sval_binop(rl_min(left
), '/', right_max
);
1569 return alloc_rl(min
, max
);
1572 static struct range_list
*handle_divide_rl(struct range_list
*left
, struct range_list
*right
)
1574 struct range_list
*left_neg
, *left_pos
, *right_neg
, *right_pos
;
1575 struct range_list
*neg_neg
, *neg_pos
, *pos_neg
, *pos_pos
;
1576 struct range_list
*ret
;
1578 if (is_whole_rl(right
))
1581 left_neg
= get_neg_rl(left
);
1582 left_pos
= get_pos_rl(left
);
1583 right_neg
= get_neg_rl(right
);
1584 right_pos
= get_pos_rl(right
);
1586 neg_neg
= divide_rl_helper(left_neg
, right_neg
);
1587 neg_pos
= divide_rl_helper(left_neg
, right_pos
);
1588 pos_neg
= divide_rl_helper(left_pos
, right_neg
);
1589 pos_pos
= divide_rl_helper(left_pos
, right_pos
);
1591 ret
= rl_union(neg_neg
, neg_pos
);
1592 ret
= rl_union(ret
, pos_neg
);
1593 return rl_union(ret
, pos_pos
);
1596 static struct range_list
*handle_add_mult_rl(struct range_list
*left
, int op
, struct range_list
*right
)
1600 if (sval_binop_overflows(rl_min(left
), op
, rl_min(right
)))
1602 min
= sval_binop(rl_min(left
), op
, rl_min(right
));
1604 if (sval_binop_overflows(rl_max(left
), op
, rl_max(right
)))
1606 max
= sval_binop(rl_max(left
), op
, rl_max(right
));
1608 return alloc_rl(min
, max
);
1611 static struct range_list
*handle_sub_rl(struct range_list
*left_orig
, struct range_list
*right_orig
)
1613 struct range_list
*left_rl
, *right_rl
;
1614 struct symbol
*type
;
1616 sval_t min_ll
, max_ll
, res_ll
;
1619 /* TODO: These things should totally be using dranges where possible */
1621 if (!left_orig
|| !right_orig
)
1625 if (type_positive_bits(rl_type(left_orig
)) > type_positive_bits(type
))
1626 type
= rl_type(left_orig
);
1627 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
1628 type
= rl_type(right_orig
);
1630 left_rl
= cast_rl(type
, left_orig
);
1631 right_rl
= cast_rl(type
, right_orig
);
1633 max
= rl_max(left_rl
);
1634 min
= sval_type_min(type
);
1636 min_ll
= rl_min(left_rl
);
1637 min_ll
.type
= &llong_ctype
;
1638 max_ll
= rl_max(right_rl
);
1639 max_ll
.type
= &llong_ctype
;
1641 res_ll
.value
= min_ll
.value
- max_ll
.value
;
1643 if (!sval_binop_overflows(rl_min(left_rl
), '-', rl_max(right_rl
))) {
1644 tmp
= sval_binop(rl_min(left_rl
), '-', rl_max(right_rl
));
1645 if (sval_cmp(tmp
, min
) > 0)
1647 } else if (type_positive_bits(type
) < 63 &&
1648 !sval_binop_overflows(min_ll
, '-', max_ll
) &&
1649 (min
.value
!= 0 && sval_cmp(res_ll
, min
) >= 0)) {
1650 struct range_list
*left_casted
, *right_casted
, *result
;
1652 left_casted
= cast_rl(&llong_ctype
, left_orig
);
1653 right_casted
= cast_rl(&llong_ctype
, right_orig
);
1654 result
= handle_sub_rl(left_casted
, right_casted
);
1655 return cast_rl(type
, result
);
1658 if (!sval_is_max(rl_max(left_rl
))) {
1659 tmp
= sval_binop(rl_max(left_rl
), '-', rl_min(right_rl
));
1660 if (sval_cmp(tmp
, max
) < 0)
1664 if (sval_is_min(min
) && sval_is_max(max
))
1667 return alloc_rl(min
, max
);
/* Mask of bits guaranteed set: derived from the list's minimum value. */
static unsigned long long rl_bits_always_set(struct range_list *rl)
{
	return sval_fls_mask(rl_min(rl));
}
/* Mask of bits that could possibly be set: derived from the list's maximum. */
static unsigned long long rl_bits_maybe_set(struct range_list *rl)
{
	return sval_fls_mask(rl_max(rl));
}
1680 static struct range_list
*handle_OR_rl(struct range_list
*left
, struct range_list
*right
)
1682 unsigned long long left_min
, left_max
, right_min
, right_max
;
1686 if ((rl_to_sval(left
, &sval
) || rl_to_sval(right
, &sval
)) &&
1687 !sval_binop_overflows(rl_max(left
), '+', rl_max(right
)))
1688 return rl_binop(left
, '+', right
);
1690 left_min
= rl_bits_always_set(left
);
1691 left_max
= rl_bits_maybe_set(left
);
1692 right_min
= rl_bits_always_set(right
);
1693 right_max
= rl_bits_maybe_set(right
);
1695 min
.type
= max
.type
= &ullong_ctype
;
1696 min
.uvalue
= left_min
| right_min
;
1697 max
.uvalue
= left_max
| right_max
;
1699 return cast_rl(rl_type(left
), alloc_rl(min
, max
));
1702 static struct range_list
*handle_XOR_rl(struct range_list
*left
, struct range_list
*right
)
1704 unsigned long long left_set
, left_maybe
;
1705 unsigned long long right_set
, right_maybe
;
1708 left_set
= rl_bits_always_set(left
);
1709 left_maybe
= rl_bits_maybe_set(left
);
1711 right_set
= rl_bits_always_set(right
);
1712 right_maybe
= rl_bits_maybe_set(right
);
1714 zero
= max
= rl_min(left
);
1716 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1718 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1721 static struct range_list
*handle_AND_rl(struct range_list
*left
, struct range_list
*right
)
1723 unsigned long long left_set
, left_maybe
;
1724 unsigned long long right_set
, right_maybe
;
1729 left_set
= rl_bits_always_set(left
);
1730 left_maybe
= rl_bits_maybe_set(left
);
1732 right_set
= rl_bits_always_set(right
);
1733 right_maybe
= rl_bits_maybe_set(right
);
1735 zero
= max
= rl_min(left
);
1737 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1739 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1742 struct range_list
*rl_binop(struct range_list
*left
, int op
, struct range_list
*right
)
1744 struct symbol
*cast_type
;
1745 sval_t left_sval
, right_sval
;
1746 struct range_list
*ret
= NULL
;
1748 cast_type
= rl_type(left
);
1749 if (sval_type_max(rl_type(left
)).uvalue
< sval_type_max(rl_type(right
)).uvalue
)
1750 cast_type
= rl_type(right
);
1751 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
1752 cast_type
= &int_ctype
;
1754 left
= cast_rl(cast_type
, left
);
1755 right
= cast_rl(cast_type
, right
);
1757 if (!left
&& !right
)
1760 if (rl_to_sval(left
, &left_sval
) && rl_to_sval(right
, &right_sval
)) {
1761 sval_t val
= sval_binop(left_sval
, op
, right_sval
);
1762 return alloc_rl(val
, val
);
1767 ret
= handle_mod_rl(left
, right
);
1770 ret
= handle_divide_rl(left
, right
);
1774 ret
= handle_add_mult_rl(left
, op
, right
);
1777 ret
= handle_OR_rl(left
, right
);
1780 ret
= handle_XOR_rl(left
, right
);
1783 ret
= handle_AND_rl(left
, right
);
1786 ret
= handle_sub_rl(left
, right
);
1788 /* FIXME: Do the rest as well */
1789 case SPECIAL_RIGHTSHIFT
:
1790 case SPECIAL_LEFTSHIFT
:
1797 void free_data_info_allocs(void)
1799 struct allocator_struct
*desc
= &data_info_allocator
;
1800 struct allocation_blob
*blob
= desc
->blobs
;
1806 desc
->allocations
= 0;
1807 desc
->total_bytes
= 0;
1808 desc
->useful_bytes
= 0;
1809 desc
->freelist
= NULL
;
1811 struct allocation_blob
*next
= blob
->next
;
1812 blob_free(blob
, desc
->chunking
);
1815 clear_data_range_alloc();
1818 void split_comparison_rl(struct range_list
*left_orig
, int op
, struct range_list
*right_orig
,
1819 struct range_list
**left_true_rl
, struct range_list
**left_false_rl
,
1820 struct range_list
**right_true_rl
, struct range_list
**right_false_rl
)
1822 struct range_list
*left_true
, *left_false
;
1823 struct range_list
*right_true
, *right_false
;
1826 min
= sval_type_min(rl_type(left_orig
));
1827 max
= sval_type_max(rl_type(left_orig
));
1829 left_true
= clone_rl(left_orig
);
1830 left_false
= clone_rl(left_orig
);
1831 right_true
= clone_rl(right_orig
);
1832 right_false
= clone_rl(right_orig
);
1836 case SPECIAL_UNSIGNED_LT
:
1837 left_true
= remove_range(left_orig
, rl_max(right_orig
), max
);
1838 if (!sval_is_min(rl_min(right_orig
))) {
1839 left_false
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
1842 right_true
= remove_range(right_orig
, min
, rl_min(left_orig
));
1843 if (!sval_is_max(rl_max(left_orig
)))
1844 right_false
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
1846 case SPECIAL_UNSIGNED_LTE
:
1848 if (!sval_is_max(rl_max(right_orig
)))
1849 left_true
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
1850 left_false
= remove_range(left_orig
, min
, rl_min(right_orig
));
1852 if (!sval_is_min(rl_min(left_orig
)))
1853 right_true
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
1854 right_false
= remove_range(right_orig
, rl_max(left_orig
), max
);
1856 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
1857 left_false
= remove_range(left_false
, rl_min(left_orig
), rl_min(left_orig
));
1858 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
1859 right_false
= remove_range(right_false
, rl_max(left_orig
), rl_max(left_orig
));
1862 left_true
= rl_intersection(left_orig
, right_orig
);
1863 right_true
= clone_rl(left_true
);
1865 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
1866 left_false
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
1867 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
1868 right_false
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
1870 case SPECIAL_UNSIGNED_GTE
:
1872 if (!sval_is_min(rl_min(right_orig
)))
1873 left_true
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
1874 left_false
= remove_range(left_orig
, rl_max(right_orig
), max
);
1876 if (!sval_is_max(rl_max(left_orig
)))
1877 right_true
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
1878 right_false
= remove_range(right_orig
, min
, rl_min(left_orig
));
1880 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
1881 right_false
= remove_range(right_false
, rl_min(left_orig
), rl_min(left_orig
));
1882 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
1883 left_false
= remove_range(left_false
, rl_max(left_orig
), rl_max(left_orig
));
1886 case SPECIAL_UNSIGNED_GT
:
1887 left_true
= remove_range(left_orig
, min
, rl_min(right_orig
));
1888 if (!sval_is_max(rl_max(right_orig
)))
1889 left_false
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
1891 right_true
= remove_range(right_orig
, rl_max(left_orig
), max
);
1892 if (!sval_is_min(rl_min(left_orig
)))
1893 right_false
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
1895 case SPECIAL_NOTEQUAL
:
1896 left_false
= rl_intersection(left_orig
, right_orig
);
1897 right_false
= clone_rl(left_false
);
1899 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
1900 left_true
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
1901 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
1902 right_true
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
1905 sm_perror(" unhandled comparison %d", op
);
1910 *left_true_rl
= left_true
;
1911 *left_false_rl
= left_false
;
1913 if (right_true_rl
) {
1914 *right_true_rl
= right_true
;
1915 *right_false_rl
= right_false
;