/*
 * Copyright (C) 2009 Dan Carpenter.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
 */
#include "smatch.h"
#include "smatch_extra.h"
#include "smatch_slist.h"
23 ALLOCATOR(data_info
, "smatch extra data");
24 ALLOCATOR(data_range
, "data range");
25 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
26 "permanent ranges", perm_data_range
);
27 __DECLARE_ALLOCATOR(struct ptr_list
, rl_ptrlist
);
29 char *show_rl(struct range_list
*list
)
31 struct data_range
*tmp
;
36 full
[sizeof(full
) - 1] = '\0';
37 FOR_EACH_PTR(list
, tmp
) {
39 strncat(full
, ",", 254 - strlen(full
));
40 if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
41 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
44 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
45 strncat(full
, "-", 254 - strlen(full
));
46 strncat(full
, sval_to_str(tmp
->max
), 254 - strlen(full
));
47 } END_FOR_EACH_PTR(tmp
);
48 if (strlen(full
) == sizeof(full
) - 1)
49 full
[sizeof(full
) - 2] = '+';
50 return alloc_sname(full
);
/* Free every range list allocated through the rl_ptrlist allocator. */
void free_all_rl(void)
{
	clear_rl_ptrlist_alloc();
}
58 static int sval_too_big(struct symbol
*type
, sval_t sval
)
60 if (type_bits(type
) >= 32 &&
61 type_bits(sval
.type
) <= type_bits(type
))
63 if (sval
.uvalue
<= ((1ULL << type_bits(type
)) - 1))
65 if (type_signed(sval
.type
)) {
66 if (type_unsigned(type
)) {
67 unsigned long long neg
= ~sval
.uvalue
;
68 if (neg
<= sval_type_max(type
).uvalue
)
71 if (sval
.value
< sval_type_min(type
).value
)
73 if (sval
.value
> sval_type_max(type
).value
)
77 if (sval
.uvalue
> sval_type_max(type
).uvalue
)
82 static int truncates_nicely(struct symbol
*type
, sval_t min
, sval_t max
)
84 unsigned long long mask
;
85 int bits
= type_bits(type
);
87 if (bits
>= type_bits(min
.type
))
90 mask
= (-1ULL >> (64 - bits
)) << (64 - bits
);
91 return (min
.uvalue
& mask
) == (max
.uvalue
& mask
);
94 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
96 /* If we're just adding a number, cast it and add it */
97 if (sval_cmp(min
, max
) == 0) {
98 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
102 /* If the range is within the type range then add it */
103 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
104 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
108 if (truncates_nicely(type
, min
, max
)) {
109 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
114 * If the range we are adding has more bits than the range type then
115 * add the whole range type. Eg:
116 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
119 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
120 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
124 /* Cast negative values to high positive values */
125 if (sval_is_negative(min
) && type_unsigned(type
)) {
126 if (sval_is_positive(max
)) {
127 if (sval_too_high(type
, max
)) {
128 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
131 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
132 max
= sval_type_max(type
);
134 max
= sval_cast(type
, max
);
136 min
= sval_cast(type
, min
);
137 add_range(rl
, min
, max
);
140 /* Cast high positive numbers to negative */
141 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
142 if (!sval_is_negative(sval_cast(type
, min
))) {
143 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
144 min
= sval_type_min(type
);
146 min
= sval_cast(type
, min
);
148 max
= sval_cast(type
, max
);
149 add_range(rl
, min
, max
);
152 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
156 static int str_to_comparison_arg_helper(const char *str
,
157 struct expression
*call
, int *comparison
,
158 struct expression
**arg
, const char **endp
)
170 *comparison
= SPECIAL_LTE
;
175 } else if (*c
== '=') {
178 *comparison
= SPECIAL_EQUAL
;
179 } else if (*c
== '>') {
182 *comparison
= SPECIAL_GTE
;
187 } else if (*c
== '!') {
190 *comparison
= SPECIAL_NOTEQUAL
;
199 param
= strtoll(c
, (char **)&c
, 10);
201 c
++; /* skip the ']' character */
207 *arg
= get_argument_from_call_expr(call
->args
, param
);
210 if (*c
== '-' && *(c
+ 1) == '>') {
214 n
= snprintf(buf
, sizeof(buf
), "$%s", c
);
215 if (n
>= sizeof(buf
))
217 if (buf
[n
- 1] == ']')
219 *arg
= gen_expression_from_key(*arg
, buf
);
220 while (*c
&& *c
!= ']')
226 int str_to_comparison_arg(const char *str
, struct expression
*call
, int *comparison
, struct expression
**arg
)
235 return str_to_comparison_arg_helper(str
, call
, comparison
, arg
, NULL
);
238 static int get_val_from_key(int use_max
, struct symbol
*type
, const char *c
, struct expression
*call
, const char **endp
, sval_t
*sval
)
240 struct expression
*arg
;
245 ret
= sval_type_max(type
);
247 ret
= sval_type_min(type
);
249 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
)) {
254 if (use_max
&& get_implied_max(arg
, &tmp
)) {
256 if (comparison
== '<') {
258 ret
= sval_binop(ret
, '-', tmp
);
261 if (!use_max
&& get_implied_min(arg
, &tmp
)) {
263 if (comparison
== '>') {
265 ret
= sval_binop(ret
, '+', tmp
);
273 static sval_t
add_one(sval_t sval
)
279 static sval_t
sub_one(sval_t sval
)
285 void filter_by_comparison(struct range_list
**rl
, int comparison
, struct range_list
*right
)
287 struct range_list
*left_orig
= *rl
;
288 struct range_list
*right_orig
= right
;
289 struct range_list
*ret_rl
= *rl
;
290 struct symbol
*cast_type
;
293 cast_type
= rl_type(left_orig
);
294 if (sval_type_max(rl_type(left_orig
)).uvalue
< sval_type_max(rl_type(right_orig
)).uvalue
)
295 cast_type
= rl_type(right_orig
);
296 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
297 cast_type
= &int_ctype
;
299 min
= sval_type_min(cast_type
);
300 max
= sval_type_max(cast_type
);
301 left_orig
= cast_rl(cast_type
, left_orig
);
302 right_orig
= cast_rl(cast_type
, right_orig
);
304 switch (comparison
) {
306 case SPECIAL_UNSIGNED_LT
:
307 ret_rl
= remove_range(left_orig
, rl_max(right_orig
), max
);
310 case SPECIAL_UNSIGNED_LTE
:
311 if (!sval_is_max(rl_max(right_orig
)))
312 ret_rl
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
315 ret_rl
= rl_intersection(left_orig
, right_orig
);
318 case SPECIAL_UNSIGNED_GTE
:
319 if (!sval_is_min(rl_min(right_orig
)))
320 ret_rl
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
323 case SPECIAL_UNSIGNED_GT
:
324 ret_rl
= remove_range(left_orig
, min
, rl_min(right_orig
));
326 case SPECIAL_NOTEQUAL
:
327 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
328 ret_rl
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
331 sm_perror("unhandled comparison %s", show_special(comparison
));
335 *rl
= cast_rl(rl_type(*rl
), ret_rl
);
338 static struct range_list
*filter_by_comparison_call(const char *c
, struct expression
*call
, const char **endp
, struct range_list
*start_rl
)
341 struct expression
*arg
;
342 struct range_list
*casted_start
, *right_orig
;
345 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
))
348 if (!get_implied_rl(arg
, &right_orig
))
352 if (type_positive_bits(rl_type(start_rl
)) > type_positive_bits(type
))
353 type
= rl_type(start_rl
);
354 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
355 type
= rl_type(right_orig
);
357 casted_start
= cast_rl(type
, start_rl
);
358 right_orig
= cast_rl(type
, right_orig
);
360 filter_by_comparison(&casted_start
, comparison
, right_orig
);
361 return cast_rl(rl_type(start_rl
), casted_start
);
364 static sval_t
parse_val(int use_max
, struct expression
*call
, struct symbol
*type
, const char *c
, const char **endp
)
366 const char *start
= c
;
369 if (!strncmp(start
, "max", 3)) {
370 ret
= sval_type_max(type
);
372 } else if (!strncmp(start
, "u64max", 6)) {
373 ret
= sval_type_val(type
, ULLONG_MAX
);
375 } else if (!strncmp(start
, "s64max", 6)) {
376 ret
= sval_type_val(type
, LLONG_MAX
);
378 } else if (!strncmp(start
, "u32max", 6)) {
379 ret
= sval_type_val(type
, UINT_MAX
);
381 } else if (!strncmp(start
, "s32max", 6)) {
382 ret
= sval_type_val(type
, INT_MAX
);
384 } else if (!strncmp(start
, "u16max", 6)) {
385 ret
= sval_type_val(type
, USHRT_MAX
);
387 } else if (!strncmp(start
, "s16max", 6)) {
388 ret
= sval_type_val(type
, SHRT_MAX
);
390 } else if (!strncmp(start
, "min", 3)) {
391 ret
= sval_type_min(type
);
393 } else if (!strncmp(start
, "s64min", 6)) {
394 ret
= sval_type_val(type
, LLONG_MIN
);
396 } else if (!strncmp(start
, "s32min", 6)) {
397 ret
= sval_type_val(type
, INT_MIN
);
399 } else if (!strncmp(start
, "s16min", 6)) {
400 ret
= sval_type_val(type
, SHRT_MIN
);
402 } else if (!strncmp(start
, "long_min", 8)) {
403 ret
= sval_type_val(type
, LONG_MIN
);
405 } else if (!strncmp(start
, "long_max", 8)) {
406 ret
= sval_type_val(type
, LONG_MAX
);
408 } else if (!strncmp(start
, "ulong_max", 9)) {
409 ret
= sval_type_val(type
, ULONG_MAX
);
411 } else if (!strncmp(start
, "ptr_max", 7)) {
412 ret
= sval_type_val(type
, valid_ptr_max
);
414 } else if (start
[0] == '[') {
415 /* this parses [==p0] comparisons */
416 get_val_from_key(1, type
, start
, call
, &c
, &ret
);
417 } else if (type_positive_bits(type
) == 64) {
418 ret
= sval_type_val(type
, strtoull(start
, (char **)&c
, 0));
420 ret
= sval_type_val(type
, strtoll(start
, (char **)&c
, 0));
426 static const char *jump_to_call_math(const char *value
)
428 const char *c
= value
;
430 while (*c
&& *c
!= '[')
436 if (*c
== '<' || *c
== '=' || *c
== '>' || *c
== '!')
442 static void str_to_rl_helper(struct expression
*call
, struct symbol
*type
, const char *str
, const char **endp
, struct range_list
**rl
)
444 struct range_list
*rl_tmp
= NULL
;
448 min
= sval_type_min(type
);
449 max
= sval_type_max(type
);
451 while (*c
!= '\0' && *c
!= '[') {
453 if (sval_cmp(min
, sval_type_min(type
)) != 0)
455 max
= sval_type_max(type
);
456 add_range_t(type
, &rl_tmp
, min
, max
);
461 min
= parse_val(0, call
, type
, c
, &c
);
462 if (!sval_fits(type
, min
))
463 min
= sval_type_min(type
);
467 if (*c
== '\0' || *c
== '[') {
468 add_range_t(type
, &rl_tmp
, min
, min
);
472 add_range_t(type
, &rl_tmp
, min
, min
);
477 min
= sval_type_max(type
);
481 sm_msg("debug XXX: trouble parsing %s c = %s", str
, c
);
487 max
= parse_val(1, call
, type
, c
, &c
);
488 if (!sval_fits(type
, max
))
489 max
= sval_type_max(type
);
491 max
= sval_type_max(type
);
494 add_range_t(type
, &rl_tmp
, min
, max
);
505 static void str_to_dinfo(struct expression
*call
, struct symbol
*type
, const char *value
, struct data_info
*dinfo
)
507 struct range_list
*math_rl
;
508 const char *call_math
;
510 struct range_list
*rl
= NULL
;
515 if (strcmp(value
, "empty") == 0)
518 if (strncmp(value
, "[==$", 4) == 0) {
519 struct expression
*arg
;
522 if (!str_to_comparison_arg(value
, call
, &comparison
, &arg
))
524 if (!get_implied_rl(arg
, &rl
))
529 str_to_rl_helper(call
, type
, value
, &c
, &rl
);
533 call_math
= jump_to_call_math(value
);
534 if (call_math
&& parse_call_math_rl(call
, call_math
, &math_rl
)) {
535 rl
= rl_intersection(rl
, math_rl
);
540 * For now if we already tried to handle the call math and couldn't
541 * figure it out then bail.
543 if (jump_to_call_math(c
) == c
+ 1)
546 rl
= filter_by_comparison_call(c
, call
, &c
, rl
);
549 rl
= cast_rl(type
, rl
);
550 dinfo
->value_ranges
= rl
;
553 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
555 struct data_info dinfo
= {};
557 str_to_dinfo(NULL
, type
, value
, &dinfo
);
558 *rl
= dinfo
.value_ranges
;
561 void call_results_to_rl(struct expression
*expr
, struct symbol
*type
, const char *value
, struct range_list
**rl
)
563 struct data_info dinfo
= {};
565 str_to_dinfo(strip_expr(expr
), type
, value
, &dinfo
);
566 *rl
= dinfo
.value_ranges
;
569 int is_whole_rl(struct range_list
*rl
)
571 struct data_range
*drange
;
573 if (ptr_list_empty(rl
))
575 drange
= first_ptr_list((struct ptr_list
*)rl
);
576 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
581 int is_whole_rl_non_zero(struct range_list
*rl
)
583 struct data_range
*drange
;
585 if (ptr_list_empty(rl
))
587 drange
= first_ptr_list((struct ptr_list
*)rl
);
588 if (sval_unsigned(drange
->min
) &&
589 drange
->min
.value
== 1 &&
590 sval_is_max(drange
->max
))
592 if (!sval_is_min(drange
->min
) || drange
->max
.value
!= -1)
594 drange
= last_ptr_list((struct ptr_list
*)rl
);
595 if (drange
->min
.value
!= 1 || !sval_is_max(drange
->max
))
600 sval_t
rl_min(struct range_list
*rl
)
602 struct data_range
*drange
;
605 ret
.type
= &llong_ctype
;
606 ret
.value
= LLONG_MIN
;
607 if (ptr_list_empty(rl
))
609 drange
= first_ptr_list((struct ptr_list
*)rl
);
613 sval_t
rl_max(struct range_list
*rl
)
615 struct data_range
*drange
;
618 ret
.type
= &llong_ctype
;
619 ret
.value
= LLONG_MAX
;
620 if (ptr_list_empty(rl
))
622 drange
= last_ptr_list((struct ptr_list
*)rl
);
626 int rl_to_sval(struct range_list
*rl
, sval_t
*sval
)
635 if (sval_cmp(min
, max
) != 0)
641 struct symbol
*rl_type(struct range_list
*rl
)
645 return rl_min(rl
).type
;
648 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
650 struct data_range
*ret
;
653 ret
= __alloc_perm_data_range(0);
655 ret
= __alloc_data_range(0);
661 struct data_range
*alloc_range(sval_t min
, sval_t max
)
663 return alloc_range_helper_sval(min
, max
, 0);
666 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
668 return alloc_range_helper_sval(min
, max
, 1);
671 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
673 struct range_list
*rl
= NULL
;
675 if (sval_cmp(min
, max
) > 0)
676 return alloc_whole_rl(min
.type
);
678 add_range(&rl
, min
, max
);
682 struct range_list
*alloc_whole_rl(struct symbol
*type
)
684 if (!type
|| type_positive_bits(type
) < 0)
686 if (type
->type
== SYM_ARRAY
)
689 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
692 extern int rl_ptrlist_hack
;
693 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
695 struct data_range
*tmp
;
696 struct data_range
*new = NULL
;
700 * There is at least on valid reason why the types might be confusing
701 * and that's when you have a void pointer and on some paths you treat
702 * it as a u8 pointer and on other paths you treat it as a u16 pointer.
703 * This case is hard to deal with.
705 * There are other cases where we probably should be more specific about
706 * the types than we are. For example, we end up merging a lot of ulong
707 * with pointers and I have not figured out why we do that.
709 * But this hack works for both cases, I think. We cast it to pointers
710 * or we use the bigger size.
713 if (*list
&& rl_type(*list
) != min
.type
) {
714 if (rl_type(*list
)->type
== SYM_PTR
) {
715 min
= sval_cast(rl_type(*list
), min
);
716 max
= sval_cast(rl_type(*list
), max
);
717 } else if (min
.type
->type
== SYM_PTR
) {
718 *list
= cast_rl(min
.type
, *list
);
719 } else if (type_bits(rl_type(*list
)) >= type_bits(min
.type
)) {
720 min
= sval_cast(rl_type(*list
), min
);
721 max
= sval_cast(rl_type(*list
), max
);
723 *list
= cast_rl(min
.type
, *list
);
727 if (sval_cmp(min
, max
) > 0) {
728 min
= sval_type_min(min
.type
);
729 max
= sval_type_max(min
.type
);
733 * FIXME: This has a problem merging a range_list like: min-0,3-max
734 * with a range like 1-2. You end up with min-2,3-max instead of
737 FOR_EACH_PTR(*list
, tmp
) {
739 /* Sometimes we overlap with more than one range
740 so we have to delete or modify the next range. */
741 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
742 /* join 2 ranges here */
744 DELETE_CURRENT_PTR(tmp
);
748 /* Doesn't overlap with the next one. */
749 if (sval_cmp(max
, tmp
->min
) < 0)
752 if (sval_cmp(max
, tmp
->max
) <= 0) {
753 /* Partially overlaps the next one. */
755 DELETE_CURRENT_PTR(tmp
);
758 /* Completely overlaps the next one. */
759 DELETE_CURRENT_PTR(tmp
);
760 /* there could be more ranges to delete */
764 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
765 /* join 2 ranges into a big range */
766 new = alloc_range(min
, tmp
->max
);
767 REPLACE_CURRENT_PTR(tmp
, new);
770 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
771 new = alloc_range(min
, max
);
772 INSERT_CURRENT(new, tmp
);
775 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
776 if (sval_cmp(max
, tmp
->max
) < 0)
780 new = alloc_range(min
, max
);
781 REPLACE_CURRENT_PTR(tmp
, new);
786 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
788 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
790 new = alloc_range(min
, max
);
791 REPLACE_CURRENT_PTR(tmp
, new);
795 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
796 /* join 2 ranges into a big range */
797 new = alloc_range(tmp
->min
, max
);
798 REPLACE_CURRENT_PTR(tmp
, new);
802 /* the new range is entirely above the existing ranges */
803 } END_FOR_EACH_PTR(tmp
);
806 new = alloc_range(min
, max
);
809 add_ptr_list(list
, new);
813 struct range_list
*clone_rl(struct range_list
*list
)
815 struct data_range
*tmp
;
816 struct range_list
*ret
= NULL
;
818 FOR_EACH_PTR(list
, tmp
) {
819 add_ptr_list(&ret
, tmp
);
820 } END_FOR_EACH_PTR(tmp
);
824 struct range_list
*clone_rl_permanent(struct range_list
*list
)
826 struct data_range
*tmp
;
827 struct data_range
*new;
828 struct range_list
*ret
= NULL
;
830 FOR_EACH_PTR(list
, tmp
) {
831 new = alloc_range_perm(tmp
->min
, tmp
->max
);
832 add_ptr_list(&ret
, new);
833 } END_FOR_EACH_PTR(tmp
);
837 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
839 struct data_range
*tmp
;
840 struct range_list
*ret
= NULL
;
842 FOR_EACH_PTR(one
, tmp
) {
843 add_range(&ret
, tmp
->min
, tmp
->max
);
844 } END_FOR_EACH_PTR(tmp
);
845 FOR_EACH_PTR(two
, tmp
) {
846 add_range(&ret
, tmp
->min
, tmp
->max
);
847 } END_FOR_EACH_PTR(tmp
);
851 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
853 struct data_range
*tmp
;
854 struct range_list
*ret
= NULL
;
859 min
= sval_cast(rl_type(list
), min
);
860 max
= sval_cast(rl_type(list
), max
);
861 if (sval_cmp(min
, max
) > 0) {
867 FOR_EACH_PTR(list
, tmp
) {
868 if (sval_cmp(tmp
->max
, min
) < 0) {
869 add_range(&ret
, tmp
->min
, tmp
->max
);
872 if (sval_cmp(tmp
->min
, max
) > 0) {
873 add_range(&ret
, tmp
->min
, tmp
->max
);
876 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
878 if (sval_cmp(tmp
->min
, min
) >= 0) {
880 add_range(&ret
, max
, tmp
->max
);
881 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
883 add_range(&ret
, tmp
->min
, min
);
887 add_range(&ret
, tmp
->min
, min
);
888 add_range(&ret
, max
, tmp
->max
);
890 } END_FOR_EACH_PTR(tmp
);
894 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
900 if (sval_cmp(one
->min
, two
->min
) != 0)
902 if (sval_cmp(one
->max
, two
->max
) != 0)
/* Are two range lists element-for-element identical? */
int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		if (!one_range && !two_range)
			return 1;
		if (!ranges_equiv(one_range, two_range))
			return 0;
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);

	return 1;
}
931 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
933 switch (comparison
) {
935 case SPECIAL_UNSIGNED_LT
:
936 if (sval_cmp(left
->min
, right
->max
) < 0)
939 case SPECIAL_UNSIGNED_LTE
:
941 if (sval_cmp(left
->min
, right
->max
) <= 0)
945 if (sval_cmp(left
->max
, right
->min
) < 0)
947 if (sval_cmp(left
->min
, right
->max
) > 0)
950 case SPECIAL_UNSIGNED_GTE
:
952 if (sval_cmp(left
->max
, right
->min
) >= 0)
956 case SPECIAL_UNSIGNED_GT
:
957 if (sval_cmp(left
->max
, right
->min
) > 0)
960 case SPECIAL_NOTEQUAL
:
961 if (sval_cmp(left
->min
, left
->max
) != 0)
963 if (sval_cmp(right
->min
, right
->max
) != 0)
965 if (sval_cmp(left
->min
, right
->min
) != 0)
969 sm_perror("unhandled comparison %d", comparison
);
/* Like true_comparison_range() but @left says which side @var is on. */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}
983 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
985 switch (comparison
) {
987 case SPECIAL_UNSIGNED_LT
:
988 if (sval_cmp(left
->max
, right
->min
) >= 0)
991 case SPECIAL_UNSIGNED_LTE
:
993 if (sval_cmp(left
->max
, right
->min
) > 0)
997 if (sval_cmp(left
->min
, left
->max
) != 0)
999 if (sval_cmp(right
->min
, right
->max
) != 0)
1001 if (sval_cmp(left
->min
, right
->min
) != 0)
1004 case SPECIAL_UNSIGNED_GTE
:
1006 if (sval_cmp(left
->min
, right
->max
) < 0)
1010 case SPECIAL_UNSIGNED_GT
:
1011 if (sval_cmp(left
->min
, right
->max
) <= 0)
1014 case SPECIAL_NOTEQUAL
:
1015 if (sval_cmp(left
->max
, right
->min
) < 0)
1017 if (sval_cmp(left
->min
, right
->max
) > 0)
1021 sm_perror("unhandled comparison %d", comparison
);
/* Like false_comparison_range_sval() but @left says which side @var is on. */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}
1035 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
1037 struct range_list
*rl_left
, *rl_right
;
1038 struct data_range
*tmp_left
, *tmp_right
;
1039 struct symbol
*type
;
1041 if (!get_implied_rl(left
, &rl_left
))
1043 if (!get_implied_rl(right
, &rl_right
))
1046 type
= rl_type(rl_left
);
1047 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1048 type
= rl_type(rl_right
);
1049 if (type_positive_bits(type
) < 31)
1052 rl_left
= cast_rl(type
, rl_left
);
1053 rl_right
= cast_rl(type
, rl_right
);
1055 FOR_EACH_PTR(rl_left
, tmp_left
) {
1056 FOR_EACH_PTR(rl_right
, tmp_right
) {
1057 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
1059 } END_FOR_EACH_PTR(tmp_right
);
1060 } END_FOR_EACH_PTR(tmp_left
);
1064 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
1066 struct range_list
*rl_left
, *rl_right
;
1067 struct data_range
*tmp_left
, *tmp_right
;
1068 struct symbol
*type
;
1070 if (!get_implied_rl(left
, &rl_left
))
1072 if (!get_implied_rl(right
, &rl_right
))
1075 type
= rl_type(rl_left
);
1076 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1077 type
= rl_type(rl_right
);
1078 if (type_positive_bits(type
) < 31)
1081 rl_left
= cast_rl(type
, rl_left
);
1082 rl_right
= cast_rl(type
, rl_right
);
1084 FOR_EACH_PTR(rl_left
, tmp_left
) {
1085 FOR_EACH_PTR(rl_right
, tmp_right
) {
1086 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
1088 } END_FOR_EACH_PTR(tmp_right
);
1089 } END_FOR_EACH_PTR(tmp_left
);
1093 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1095 struct data_range
*left_tmp
, *right_tmp
;
1096 struct symbol
*type
;
1098 if (!left_ranges
|| !right_ranges
)
1101 type
= rl_type(left_ranges
);
1102 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1103 type
= rl_type(right_ranges
);
1104 if (type_positive_bits(type
) < 31)
1107 left_ranges
= cast_rl(type
, left_ranges
);
1108 right_ranges
= cast_rl(type
, right_ranges
);
1110 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1111 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1112 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
1114 } END_FOR_EACH_PTR(right_tmp
);
1115 } END_FOR_EACH_PTR(left_tmp
);
1119 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1121 struct data_range
*left_tmp
, *right_tmp
;
1122 struct symbol
*type
;
1124 if (!left_ranges
|| !right_ranges
)
1127 type
= rl_type(left_ranges
);
1128 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1129 type
= rl_type(right_ranges
);
1130 if (type_positive_bits(type
) < 31)
1133 left_ranges
= cast_rl(type
, left_ranges
);
1134 right_ranges
= cast_rl(type
, right_ranges
);
1136 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1137 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1138 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
1140 } END_FOR_EACH_PTR(right_tmp
);
1141 } END_FOR_EACH_PTR(left_tmp
);
/* FIXME: the _rl here stands for right left so really it should be _lr */
/* Like possibly_true_rl() but @left says which side @a is on. */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}
/* Like possibly_false_rl() but @left says which side @a is on. */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}
1162 int rl_has_sval(struct range_list
*rl
, sval_t sval
)
1164 struct data_range
*tmp
;
1166 FOR_EACH_PTR(rl
, tmp
) {
1167 if (sval_cmp(tmp
->min
, sval
) <= 0 &&
1168 sval_cmp(tmp
->max
, sval
) >= 0)
1170 } END_FOR_EACH_PTR(tmp
);
/* Append @drange to *list without any merging or sorting. */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/* Push @rl onto the range-list stack. */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/* Pop and return the top of the range-list stack. */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}
/* Return the top of the range-list stack without popping it. */
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}
/* Remove @filter's ranges from the range list on top of the stack. */
void filter_top_rl(struct range_list_stack **rl_stack, struct range_list *filter)
{
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = rl_filter(rl, filter);
	push_rl(rl_stack, rl);
}
1210 struct range_list
*rl_truncate_cast(struct symbol
*type
, struct range_list
*rl
)
1212 struct data_range
*tmp
;
1213 struct range_list
*ret
= NULL
;
1219 if (!type
|| type
== rl_type(rl
))
1222 FOR_EACH_PTR(rl
, tmp
) {
1225 if (type_bits(type
) < type_bits(rl_type(rl
))) {
1226 min
.uvalue
= tmp
->min
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1227 max
.uvalue
= tmp
->max
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1229 if (sval_cmp(min
, max
) > 0) {
1230 min
= sval_cast(type
, min
);
1231 max
= sval_cast(type
, max
);
1233 add_range_t(type
, &ret
, min
, max
);
1234 } END_FOR_EACH_PTR(tmp
);
1239 static int rl_is_sane(struct range_list
*rl
)
1241 struct data_range
*tmp
;
1242 struct symbol
*type
;
1245 FOR_EACH_PTR(rl
, tmp
) {
1246 if (!sval_fits(type
, tmp
->min
))
1248 if (!sval_fits(type
, tmp
->max
))
1250 if (sval_cmp(tmp
->min
, tmp
->max
) > 0)
1252 } END_FOR_EACH_PTR(tmp
);
1257 static int rl_type_consistent(struct range_list
*rl
)
1259 struct data_range
*tmp
;
1260 struct symbol
*type
;
1263 FOR_EACH_PTR(rl
, tmp
) {
1264 if (type
!= tmp
->min
.type
|| type
!= tmp
->max
.type
)
1266 } END_FOR_EACH_PTR(tmp
);
1270 static struct range_list
*cast_to_bool(struct range_list
*rl
)
1272 struct data_range
*tmp
;
1273 struct range_list
*ret
= NULL
;
1276 sval_t min
= { .type
= &bool_ctype
};
1277 sval_t max
= { .type
= &bool_ctype
};
1279 FOR_EACH_PTR(rl
, tmp
) {
1280 if (tmp
->min
.value
|| tmp
->max
.value
)
1282 if (sval_is_negative(tmp
->min
) &&
1283 sval_is_negative(tmp
->max
))
1285 if (tmp
->min
.value
== 0 ||
1286 tmp
->max
.value
== 0)
1288 if (sval_is_negative(tmp
->min
) &&
1291 } END_FOR_EACH_PTR(tmp
);
1298 add_range(&ret
, min
, max
);
1302 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
1304 struct data_range
*tmp
;
1305 struct range_list
*ret
= NULL
;
1312 if (!rl_is_sane(rl
))
1313 return alloc_whole_rl(type
);
1314 if (type
== rl_type(rl
) && rl_type_consistent(rl
))
1317 if (type
== &bool_ctype
)
1318 return cast_to_bool(rl
);
1320 FOR_EACH_PTR(rl
, tmp
) {
1321 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
1322 } END_FOR_EACH_PTR(tmp
);
1325 return alloc_whole_rl(type
);
1330 struct range_list
*rl_invert(struct range_list
*orig
)
1332 struct range_list
*ret
= NULL
;
1333 struct data_range
*tmp
;
1334 sval_t gap_min
, abs_max
, sval
;
1338 if (type_bits(rl_type(orig
)) < 0) /* void type mostly */
1341 gap_min
= sval_type_min(rl_min(orig
).type
);
1342 abs_max
= sval_type_max(rl_max(orig
).type
);
1344 FOR_EACH_PTR(orig
, tmp
) {
1345 if (sval_cmp(tmp
->min
, gap_min
) > 0) {
1346 sval
= sval_type_val(tmp
->min
.type
, tmp
->min
.value
- 1);
1347 add_range(&ret
, gap_min
, sval
);
1349 if (sval_cmp(tmp
->max
, abs_max
) == 0)
1351 gap_min
= sval_type_val(tmp
->max
.type
, tmp
->max
.value
+ 1);
1352 } END_FOR_EACH_PTR(tmp
);
1354 if (sval_cmp(gap_min
, abs_max
) <= 0)
1355 add_range(&ret
, gap_min
, abs_max
);
1360 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
1362 struct data_range
*tmp
;
1364 FOR_EACH_PTR(filter
, tmp
) {
1365 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
1366 } END_FOR_EACH_PTR(tmp
);
1371 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
1373 struct range_list
*one_orig
;
1374 struct range_list
*two_orig
;
1375 struct range_list
*ret
;
1376 struct symbol
*ret_type
;
1377 struct symbol
*small_type
;
1378 struct symbol
*large_type
;
1388 ret_type
= rl_type(one
);
1389 small_type
= rl_type(one
);
1390 large_type
= rl_type(two
);
1392 if (type_bits(rl_type(two
)) < type_bits(small_type
)) {
1393 small_type
= rl_type(two
);
1394 large_type
= rl_type(one
);
1397 one
= cast_rl(large_type
, one
);
1398 two
= cast_rl(large_type
, two
);
1401 one
= rl_invert(one
);
1402 two
= rl_invert(two
);
1404 ret
= rl_filter(ret
, one
);
1405 ret
= rl_filter(ret
, two
);
1407 one
= cast_rl(small_type
, one_orig
);
1408 two
= cast_rl(small_type
, two_orig
);
1410 one
= rl_invert(one
);
1411 two
= rl_invert(two
);
1413 ret
= cast_rl(small_type
, ret
);
1414 ret
= rl_filter(ret
, one
);
1415 ret
= rl_filter(ret
, two
);
1417 return cast_rl(ret_type
, ret
);
1420 static struct range_list
*handle_mod_rl(struct range_list
*left
, struct range_list
*right
)
1425 max
= rl_max(right
);
1426 if (sval_is_max(max
))
1431 if (sval_is_negative(max
))
1433 if (sval_cmp(rl_max(left
), max
) < 0)
1437 return alloc_rl(zero
, max
);
1440 static struct range_list
*get_neg_rl(struct range_list
*rl
)
1442 struct data_range
*tmp
;
1443 struct data_range
*new;
1444 struct range_list
*ret
= NULL
;
1448 if (sval_is_positive(rl_min(rl
)))
1451 FOR_EACH_PTR(rl
, tmp
) {
1452 if (sval_is_positive(tmp
->min
))
1454 if (sval_is_positive(tmp
->max
)) {
1455 new = alloc_range(tmp
->min
, tmp
->max
);
1456 new->max
.value
= -1;
1457 add_range(&ret
, new->min
, new->max
);
1460 add_range(&ret
, tmp
->min
, tmp
->max
);
1461 } END_FOR_EACH_PTR(tmp
);
1466 static struct range_list
*get_pos_rl(struct range_list
*rl
)
1468 struct data_range
*tmp
;
1469 struct data_range
*new;
1470 struct range_list
*ret
= NULL
;
1474 if (sval_is_negative(rl_max(rl
)))
1477 FOR_EACH_PTR(rl
, tmp
) {
1478 if (sval_is_negative(tmp
->max
))
1480 if (sval_is_positive(tmp
->min
)) {
1481 add_range(&ret
, tmp
->min
, tmp
->max
);
1484 new = alloc_range(tmp
->min
, tmp
->max
);
1486 add_range(&ret
, new->min
, new->max
);
1487 } END_FOR_EACH_PTR(tmp
);
1492 static struct range_list
*divide_rl_helper(struct range_list
*left
, struct range_list
*right
)
1494 sval_t right_min
, right_max
;
1497 if (!left
|| !right
)
1500 /* let's assume we never divide by zero */
1501 right_min
= rl_min(right
);
1502 right_max
= rl_max(right
);
1503 if (right_min
.value
== 0 && right_max
.value
== 0)
1505 if (right_min
.value
== 0)
1506 right_min
.value
= 1;
1507 if (right_max
.value
== 0)
1508 right_max
.value
= -1;
1510 max
= sval_binop(rl_max(left
), '/', right_min
);
1511 min
= sval_binop(rl_min(left
), '/', right_max
);
1513 return alloc_rl(min
, max
);
1516 static struct range_list
*handle_divide_rl(struct range_list
*left
, struct range_list
*right
)
1518 struct range_list
*left_neg
, *left_pos
, *right_neg
, *right_pos
;
1519 struct range_list
*neg_neg
, *neg_pos
, *pos_neg
, *pos_pos
;
1520 struct range_list
*ret
;
1522 if (is_whole_rl(right
))
1525 left_neg
= get_neg_rl(left
);
1526 left_pos
= get_pos_rl(left
);
1527 right_neg
= get_neg_rl(right
);
1528 right_pos
= get_pos_rl(right
);
1530 neg_neg
= divide_rl_helper(left_neg
, right_neg
);
1531 neg_pos
= divide_rl_helper(left_neg
, right_pos
);
1532 pos_neg
= divide_rl_helper(left_pos
, right_neg
);
1533 pos_pos
= divide_rl_helper(left_pos
, right_pos
);
1535 ret
= rl_union(neg_neg
, neg_pos
);
1536 ret
= rl_union(ret
, pos_neg
);
1537 return rl_union(ret
, pos_pos
);
1540 static struct range_list
*handle_add_mult_rl(struct range_list
*left
, int op
, struct range_list
*right
)
1544 if (sval_binop_overflows(rl_min(left
), op
, rl_min(right
)))
1546 min
= sval_binop(rl_min(left
), op
, rl_min(right
));
1548 if (sval_binop_overflows(rl_max(left
), op
, rl_max(right
)))
1550 max
= sval_binop(rl_max(left
), op
, rl_max(right
));
1552 return alloc_rl(min
, max
);
1555 static struct range_list
*handle_sub_rl(struct range_list
*left_orig
, struct range_list
*right_orig
)
1557 struct range_list
*left_rl
, *right_rl
;
1558 struct symbol
*type
;
1560 sval_t min_ll
, max_ll
, res_ll
;
1563 /* TODO: These things should totally be using dranges where possible */
1565 if (!left_orig
|| !right_orig
)
1569 if (type_positive_bits(rl_type(left_orig
)) > type_positive_bits(type
))
1570 type
= rl_type(left_orig
);
1571 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
1572 type
= rl_type(right_orig
);
1574 left_rl
= cast_rl(type
, left_orig
);
1575 right_rl
= cast_rl(type
, right_orig
);
1577 max
= rl_max(left_rl
);
1578 min
= sval_type_min(type
);
1580 min_ll
= rl_min(left_rl
);
1581 min_ll
.type
= &llong_ctype
;
1582 max_ll
= rl_max(right_rl
);
1583 max_ll
.type
= &llong_ctype
;
1585 res_ll
.value
= min_ll
.value
- max_ll
.value
;
1587 if (!sval_binop_overflows(rl_min(left_rl
), '-', rl_max(right_rl
))) {
1588 tmp
= sval_binop(rl_min(left_rl
), '-', rl_max(right_rl
));
1589 if (sval_cmp(tmp
, min
) > 0)
1591 } else if (type_positive_bits(type
) < 63 &&
1592 !sval_binop_overflows(min_ll
, '-', max_ll
) &&
1593 (min
.value
!= 0 && sval_cmp(res_ll
, min
) >= 0)) {
1594 struct range_list
*left_casted
, *right_casted
, *result
;
1596 left_casted
= cast_rl(&llong_ctype
, left_orig
);
1597 right_casted
= cast_rl(&llong_ctype
, right_orig
);
1598 result
= handle_sub_rl(left_casted
, right_casted
);
1599 return cast_rl(type
, result
);
1602 if (!sval_is_max(rl_max(left_rl
))) {
1603 tmp
= sval_binop(rl_max(left_rl
), '-', rl_min(right_rl
));
1604 if (sval_cmp(tmp
, max
) < 0)
1608 if (sval_is_min(min
) && sval_is_max(max
))
1611 return alloc_rl(min
, max
);
/* Mask of bits guaranteed set, estimated from the list's minimum value. */
static unsigned long long rl_bits_always_set(struct range_list *rl)
{
	return sval_fls_mask(rl_min(rl));
}
/* Mask of bits that could possibly be set, from the list's maximum value. */
static unsigned long long rl_bits_maybe_set(struct range_list *rl)
{
	return sval_fls_mask(rl_max(rl));
}
1624 static struct range_list
*handle_OR_rl(struct range_list
*left
, struct range_list
*right
)
1626 unsigned long long left_min
, left_max
, right_min
, right_max
;
1630 if ((rl_to_sval(left
, &sval
) || rl_to_sval(right
, &sval
)) &&
1631 !sval_binop_overflows(rl_max(left
), '+', rl_max(right
)))
1632 return rl_binop(left
, '+', right
);
1634 left_min
= rl_bits_always_set(left
);
1635 left_max
= rl_bits_maybe_set(left
);
1636 right_min
= rl_bits_always_set(right
);
1637 right_max
= rl_bits_maybe_set(right
);
1639 min
.type
= max
.type
= &ullong_ctype
;
1640 min
.uvalue
= left_min
| right_min
;
1641 max
.uvalue
= left_max
| right_max
;
1643 return cast_rl(rl_type(left
), alloc_rl(min
, max
));
1646 static struct range_list
*handle_XOR_rl(struct range_list
*left
, struct range_list
*right
)
1648 unsigned long long left_set
, left_maybe
;
1649 unsigned long long right_set
, right_maybe
;
1652 left_set
= rl_bits_always_set(left
);
1653 left_maybe
= rl_bits_maybe_set(left
);
1655 right_set
= rl_bits_always_set(right
);
1656 right_maybe
= rl_bits_maybe_set(right
);
1658 zero
= max
= rl_min(left
);
1660 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1662 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1665 static struct range_list
*handle_AND_rl(struct range_list
*left
, struct range_list
*right
)
1667 unsigned long long left_set
, left_maybe
;
1668 unsigned long long right_set
, right_maybe
;
1673 left_set
= rl_bits_always_set(left
);
1674 left_maybe
= rl_bits_maybe_set(left
);
1676 right_set
= rl_bits_always_set(right
);
1677 right_maybe
= rl_bits_maybe_set(right
);
1679 zero
= max
= rl_min(left
);
1681 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1683 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1686 struct range_list
*rl_binop(struct range_list
*left
, int op
, struct range_list
*right
)
1688 struct symbol
*cast_type
;
1689 sval_t left_sval
, right_sval
;
1690 struct range_list
*ret
= NULL
;
1692 cast_type
= rl_type(left
);
1693 if (sval_type_max(rl_type(left
)).uvalue
< sval_type_max(rl_type(right
)).uvalue
)
1694 cast_type
= rl_type(right
);
1695 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
1696 cast_type
= &int_ctype
;
1698 left
= cast_rl(cast_type
, left
);
1699 right
= cast_rl(cast_type
, right
);
1701 if (!left
&& !right
)
1704 if (rl_to_sval(left
, &left_sval
) && rl_to_sval(right
, &right_sval
)) {
1705 sval_t val
= sval_binop(left_sval
, op
, right_sval
);
1706 return alloc_rl(val
, val
);
1711 ret
= handle_mod_rl(left
, right
);
1714 ret
= handle_divide_rl(left
, right
);
1718 ret
= handle_add_mult_rl(left
, op
, right
);
1721 ret
= handle_OR_rl(left
, right
);
1724 ret
= handle_XOR_rl(left
, right
);
1727 ret
= handle_AND_rl(left
, right
);
1730 ret
= handle_sub_rl(left
, right
);
1732 /* FIXME: Do the rest as well */
1733 case SPECIAL_RIGHTSHIFT
:
1734 case SPECIAL_LEFTSHIFT
:
1741 void free_data_info_allocs(void)
1743 struct allocator_struct
*desc
= &data_info_allocator
;
1744 struct allocation_blob
*blob
= desc
->blobs
;
1750 desc
->allocations
= 0;
1751 desc
->total_bytes
= 0;
1752 desc
->useful_bytes
= 0;
1753 desc
->freelist
= NULL
;
1755 struct allocation_blob
*next
= blob
->next
;
1756 blob_free(blob
, desc
->chunking
);
1759 clear_data_range_alloc();
1762 void split_comparison_rl(struct range_list
*left_orig
, int op
, struct range_list
*right_orig
,
1763 struct range_list
**left_true_rl
, struct range_list
**left_false_rl
,
1764 struct range_list
**right_true_rl
, struct range_list
**right_false_rl
)
1766 struct range_list
*left_true
, *left_false
;
1767 struct range_list
*right_true
, *right_false
;
1770 min
= sval_type_min(rl_type(left_orig
));
1771 max
= sval_type_max(rl_type(left_orig
));
1773 left_true
= clone_rl(left_orig
);
1774 left_false
= clone_rl(left_orig
);
1775 right_true
= clone_rl(right_orig
);
1776 right_false
= clone_rl(right_orig
);
1780 case SPECIAL_UNSIGNED_LT
:
1781 left_true
= remove_range(left_orig
, rl_max(right_orig
), max
);
1782 if (!sval_is_min(rl_min(right_orig
))) {
1783 left_false
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
1786 right_true
= remove_range(right_orig
, min
, rl_min(left_orig
));
1787 if (!sval_is_max(rl_max(left_orig
)))
1788 right_false
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
1790 case SPECIAL_UNSIGNED_LTE
:
1792 if (!sval_is_max(rl_max(right_orig
)))
1793 left_true
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
1794 left_false
= remove_range(left_orig
, min
, rl_min(right_orig
));
1796 if (!sval_is_min(rl_min(left_orig
)))
1797 right_true
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
1798 right_false
= remove_range(right_orig
, rl_max(left_orig
), max
);
1800 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
1801 left_false
= remove_range(left_false
, rl_min(left_orig
), rl_min(left_orig
));
1802 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
1803 right_false
= remove_range(right_false
, rl_max(left_orig
), rl_max(left_orig
));
1806 left_true
= rl_intersection(left_orig
, right_orig
);
1807 right_true
= clone_rl(left_true
);
1809 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
1810 left_false
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
1811 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
1812 right_false
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
1814 case SPECIAL_UNSIGNED_GTE
:
1816 if (!sval_is_min(rl_min(right_orig
)))
1817 left_true
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
1818 left_false
= remove_range(left_orig
, rl_max(right_orig
), max
);
1820 if (!sval_is_max(rl_max(left_orig
)))
1821 right_true
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
1822 right_false
= remove_range(right_orig
, min
, rl_min(left_orig
));
1824 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
1825 right_false
= remove_range(right_false
, rl_min(left_orig
), rl_min(left_orig
));
1826 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
1827 left_false
= remove_range(left_false
, rl_max(left_orig
), rl_max(left_orig
));
1830 case SPECIAL_UNSIGNED_GT
:
1831 left_true
= remove_range(left_orig
, min
, rl_min(right_orig
));
1832 if (!sval_is_max(rl_max(right_orig
)))
1833 left_false
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
1835 right_true
= remove_range(right_orig
, rl_max(left_orig
), max
);
1836 if (!sval_is_min(rl_min(left_orig
)))
1837 right_false
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
1839 case SPECIAL_NOTEQUAL
:
1840 left_false
= rl_intersection(left_orig
, right_orig
);
1841 right_false
= clone_rl(left_false
);
1843 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
1844 left_true
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
1845 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
1846 right_true
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
1849 sm_perror(" unhandled comparison %d", op
);
1854 *left_true_rl
= left_true
;
1855 *left_false_rl
= left_false
;
1857 if (right_true_rl
) {
1858 *right_true_rl
= right_true
;
1859 *right_false_rl
= right_false
;