2 * Copyright (C) 2009 Dan Carpenter.
4 * This program is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU General Public License
6 * as published by the Free Software Foundation; either version 2
7 * of the License, or (at your option) any later version.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
20 #include "smatch_extra.h"
21 #include "smatch_slist.h"
23 ALLOCATOR(data_info
, "smatch extra data");
24 ALLOCATOR(data_range
, "data range");
25 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
26 "permanent ranges", perm_data_range
);
27 static struct range_list
*cast_rl_private(struct symbol
*type
, struct range_list
*rl
);
29 char *show_rl(struct range_list
*list
)
31 struct data_range
*tmp
;
36 full
[sizeof(full
) - 1] = '\0';
37 FOR_EACH_PTR(list
, tmp
) {
39 strncat(full
, ",", 254 - strlen(full
));
40 if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
41 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
44 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
45 strncat(full
, "-", 254 - strlen(full
));
46 strncat(full
, sval_to_str(tmp
->max
), 254 - strlen(full
));
47 } END_FOR_EACH_PTR(tmp
);
48 if (strlen(full
) == sizeof(full
) - 1)
49 full
[sizeof(full
) - 2] = '+';
50 return alloc_sname(full
);
53 struct range_list_stack
*big_rl_stack
;
55 static void record_rl(struct range_list
*rl
)
61 push_rl(&big_rl_stack
, rl
);
64 void free_all_rl(void)
66 struct range_list
*rl
;
68 FOR_EACH_PTR(big_rl_stack
, rl
) {
70 } END_FOR_EACH_PTR(rl
);
72 free_ptr_list(&big_rl_stack
);
75 static int sval_too_big(struct symbol
*type
, sval_t sval
)
77 if (type_bits(type
) >= 32 &&
78 type_bits(sval
.type
) <= type_bits(type
))
80 if (sval
.uvalue
<= ((1ULL << type_bits(type
)) - 1))
82 if (type_signed(sval
.type
)) {
83 if (type_unsigned(type
)) {
84 unsigned long long neg
= ~sval
.uvalue
;
85 if (neg
<= sval_type_max(type
).uvalue
)
88 if (sval
.value
< sval_type_min(type
).value
)
90 if (sval
.value
> sval_type_max(type
).value
)
94 if (sval
.uvalue
> sval_type_max(type
).uvalue
)
99 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
101 /* If we're just adding a number, cast it and add it */
102 if (sval_cmp(min
, max
) == 0) {
103 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
107 /* If the range is within the type range then add it */
108 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
109 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
114 * If the range we are adding has more bits than the range type then
115 * add the whole range type. Eg:
116 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
117 * This isn't totally the right thing to do. We could be more granular.
119 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
120 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
124 /* Cast negative values to high positive values */
125 if (sval_is_negative(min
) && type_unsigned(type
)) {
126 if (sval_is_positive(max
)) {
127 if (sval_too_high(type
, max
)) {
128 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
131 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
132 max
= sval_type_max(type
);
134 max
= sval_cast(type
, max
);
136 min
= sval_cast(type
, min
);
137 add_range(rl
, min
, max
);
140 /* Cast high positive numbers to negative */
141 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
142 if (!sval_is_negative(sval_cast(type
, min
))) {
143 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
144 min
= sval_type_min(type
);
146 min
= sval_cast(type
, min
);
148 max
= sval_cast(type
, max
);
149 add_range(rl
, min
, max
);
152 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
156 static int str_to_comparison_arg_helper(const char *str
,
157 struct expression
*call
, int *comparison
,
158 struct expression
**arg
, char **endp
)
161 char *c
= (char *)str
;
170 *comparison
= SPECIAL_LTE
;
175 } else if (*c
== '=') {
178 *comparison
= SPECIAL_EQUAL
;
179 } else if (*c
== '>') {
182 *comparison
= SPECIAL_GTE
;
187 } else if (*c
== '!') {
190 *comparison
= SPECIAL_NOTEQUAL
;
199 param
= strtoll(c
, &c
, 10);
200 c
++; /* skip the ']' character */
206 *arg
= get_argument_from_call_expr(call
->args
, param
);
212 int str_to_comparison_arg(const char *str
, struct expression
*call
, int *comparison
, struct expression
**arg
)
221 return str_to_comparison_arg_helper(str
, call
, comparison
, arg
, NULL
);
224 static int get_val_from_key(int use_max
, struct symbol
*type
, char *c
, struct expression
*call
, char **endp
, sval_t
*sval
)
226 struct expression
*arg
;
231 ret
= sval_type_max(type
);
233 ret
= sval_type_min(type
);
235 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
)) {
240 if (use_max
&& get_implied_max(arg
, &tmp
)) {
242 if (comparison
== '<') {
244 ret
= sval_binop(ret
, '-', tmp
);
247 if (!use_max
&& get_implied_min(arg
, &tmp
)) {
249 if (comparison
== '>') {
251 ret
= sval_binop(ret
, '+', tmp
);
259 static sval_t
add_one(sval_t sval
)
265 static sval_t
sub_one(sval_t sval
)
271 void filter_by_comparison(struct range_list
**rl
, int comparison
, struct range_list
*right
)
273 struct range_list
*left_orig
= *rl
;
274 struct range_list
*right_orig
= right
;
275 struct range_list
*ret_rl
= *rl
;
276 struct symbol
*cast_type
;
279 cast_type
= rl_type(left_orig
);
280 if (sval_type_max(rl_type(left_orig
)).uvalue
< sval_type_max(rl_type(right_orig
)).uvalue
)
281 cast_type
= rl_type(right_orig
);
282 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
283 cast_type
= &int_ctype
;
285 min
= sval_type_min(cast_type
);
286 max
= sval_type_max(cast_type
);
287 left_orig
= cast_rl(cast_type
, left_orig
);
288 right_orig
= cast_rl(cast_type
, right_orig
);
290 switch (comparison
) {
292 case SPECIAL_UNSIGNED_LT
:
293 ret_rl
= remove_range(left_orig
, rl_max(right_orig
), max
);
296 case SPECIAL_UNSIGNED_LTE
:
297 if (!sval_is_max(rl_max(right_orig
)))
298 ret_rl
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
301 if (!sval_is_max(rl_max(right_orig
)))
302 ret_rl
= remove_range(ret_rl
, add_one(rl_max(right_orig
)), max
);
303 if (!sval_is_min(rl_min(right_orig
)))
304 ret_rl
= remove_range(ret_rl
, min
, sub_one(rl_min(right_orig
)));
307 case SPECIAL_UNSIGNED_GTE
:
308 if (!sval_is_min(rl_min(right_orig
)))
309 ret_rl
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
312 case SPECIAL_UNSIGNED_GT
:
313 ret_rl
= remove_range(left_orig
, min
, rl_min(right_orig
));
315 case SPECIAL_NOTEQUAL
:
316 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
317 ret_rl
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
320 sm_msg("internal error: unhandled comparison %s", show_special(comparison
));
324 *rl
= cast_rl(rl_type(*rl
), ret_rl
);
327 static struct range_list
*filter_by_comparison_call(char *c
, struct expression
*call
, char **endp
, struct range_list
*start_rl
)
330 struct expression
*arg
;
331 struct range_list
*casted_start
, *right_orig
;
334 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
))
337 if (!get_implied_rl(arg
, &right_orig
))
341 if (type_positive_bits(rl_type(start_rl
)) > type_positive_bits(type
))
342 type
= rl_type(start_rl
);
343 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
344 type
= rl_type(right_orig
);
346 casted_start
= cast_rl(type
, start_rl
);
347 right_orig
= cast_rl(type
, right_orig
);
349 filter_by_comparison(&casted_start
, comparison
, right_orig
);
350 return cast_rl(rl_type(start_rl
), casted_start
);
353 static sval_t
parse_val(int use_max
, struct expression
*call
, struct symbol
*type
, char *c
, char **endp
)
358 if (!strncmp(start
, "max", 3)) {
359 ret
= sval_type_max(type
);
361 } else if (!strncmp(start
, "u64max", 6)) {
362 ret
= sval_type_val(type
, ULLONG_MAX
);
364 } else if (!strncmp(start
, "s64max", 6)) {
365 ret
= sval_type_val(type
, LLONG_MAX
);
367 } else if (!strncmp(start
, "u32max", 6)) {
368 ret
= sval_type_val(type
, UINT_MAX
);
370 } else if (!strncmp(start
, "s32max", 6)) {
371 ret
= sval_type_val(type
, INT_MAX
);
373 } else if (!strncmp(start
, "u16max", 6)) {
374 ret
= sval_type_val(type
, USHRT_MAX
);
376 } else if (!strncmp(start
, "s16max", 6)) {
377 ret
= sval_type_val(type
, SHRT_MAX
);
379 } else if (!strncmp(start
, "min", 3)) {
380 ret
= sval_type_min(type
);
382 } else if (!strncmp(start
, "s64min", 6)) {
383 ret
= sval_type_val(type
, LLONG_MIN
);
385 } else if (!strncmp(start
, "s32min", 6)) {
386 ret
= sval_type_val(type
, INT_MIN
);
388 } else if (!strncmp(start
, "s16min", 6)) {
389 ret
= sval_type_val(type
, SHRT_MIN
);
391 } else if (!strncmp(start
, "long_min", 8)) {
392 ret
= sval_type_val(type
, LONG_MIN
);
394 } else if (!strncmp(start
, "long_max", 8)) {
395 ret
= sval_type_val(type
, LONG_MAX
);
397 } else if (!strncmp(start
, "ulong_max", 9)) {
398 ret
= sval_type_val(type
, ULONG_MAX
);
400 } else if (!strncmp(start
, "ptr_max", 7)) {
401 ret
= sval_type_val(type
, valid_ptr_max
);
403 } else if (start
[0] == '[') {
404 /* this parses [==p0] comparisons */
405 get_val_from_key(1, type
, start
, call
, &c
, &ret
);
406 } else if (type_positive_bits(type
) == 64) {
407 ret
= sval_type_val(type
, strtoull(start
, &c
, 10));
409 ret
= sval_type_val(type
, strtoll(start
, &c
, 10));
415 static char *jump_to_call_math(char *value
)
419 while (*c
&& *c
!= '[')
425 if (*c
== '<' || *c
== '=' || *c
== '>' || *c
== '!')
431 static void str_to_rl_helper(struct expression
*call
, struct symbol
*type
, char *str
, char **endp
, struct range_list
**rl
)
433 struct range_list
*rl_tmp
= NULL
;
437 min
= sval_type_min(type
);
438 max
= sval_type_max(type
);
440 while (*c
!= '\0' && *c
!= '[') {
442 if (sval_cmp(min
, sval_type_min(type
)) != 0)
444 max
= sval_type_max(type
);
445 add_range_t(type
, &rl_tmp
, min
, max
);
450 min
= parse_val(0, call
, type
, c
, &c
);
451 if (!sval_fits(type
, min
))
452 min
= sval_type_min(type
);
456 if (*c
== '\0' || *c
== '[') {
457 add_range_t(type
, &rl_tmp
, min
, min
);
461 add_range_t(type
, &rl_tmp
, min
, min
);
466 min
= sval_type_max(type
);
470 sm_msg("debug XXX: trouble parsing %s c = %s", str
, c
);
476 max
= parse_val(1, call
, type
, c
, &c
);
477 if (!sval_fits(type
, max
))
478 max
= sval_type_max(type
);
480 max
= sval_type_max(type
);
483 add_range_t(type
, &rl_tmp
, min
, max
);
494 static void str_to_dinfo(struct expression
*call
, struct symbol
*type
, char *value
, struct data_info
*dinfo
)
496 struct range_list
*math_rl
;
499 struct range_list
*rl
= NULL
;
504 if (strcmp(value
, "empty") == 0)
507 if (strncmp(value
, "[==$", 4) == 0) {
508 struct expression
*arg
;
511 if (!str_to_comparison_arg(value
, call
, &comparison
, &arg
))
513 if (!get_implied_rl(arg
, &rl
))
518 str_to_rl_helper(call
, type
, value
, &c
, &rl
);
522 call_math
= jump_to_call_math(value
);
523 if (call_math
&& parse_call_math_rl(call
, call_math
, &math_rl
)) {
524 rl
= rl_intersection(rl
, math_rl
);
529 * For now if we already tried to handle the call math and couldn't
530 * figure it out then bail.
532 if (jump_to_call_math(c
) == c
+ 1)
535 rl
= filter_by_comparison_call(c
, call
, &c
, rl
);
538 rl
= cast_rl(type
, rl
);
539 dinfo
->value_ranges
= rl
;
542 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
544 struct data_info dinfo
= {};
546 str_to_dinfo(NULL
, type
, value
, &dinfo
);
547 *rl
= dinfo
.value_ranges
;
550 void call_results_to_rl(struct expression
*expr
, struct symbol
*type
, char *value
, struct range_list
**rl
)
552 struct data_info dinfo
= {};
554 str_to_dinfo(strip_expr(expr
), type
, value
, &dinfo
);
555 *rl
= dinfo
.value_ranges
;
558 int is_whole_rl(struct range_list
*rl
)
560 struct data_range
*drange
;
562 if (ptr_list_empty(rl
))
564 drange
= first_ptr_list((struct ptr_list
*)rl
);
565 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
570 int is_whole_rl_non_zero(struct range_list
*rl
)
572 struct data_range
*drange
;
574 if (ptr_list_empty(rl
))
576 drange
= first_ptr_list((struct ptr_list
*)rl
);
577 if (sval_unsigned(drange
->min
) &&
578 drange
->min
.value
== 1 &&
579 sval_is_max(drange
->max
))
581 if (!sval_is_min(drange
->min
) || drange
->max
.value
!= -1)
583 drange
= last_ptr_list((struct ptr_list
*)rl
);
584 if (drange
->min
.value
!= 1 || !sval_is_max(drange
->max
))
589 sval_t
rl_min(struct range_list
*rl
)
591 struct data_range
*drange
;
594 ret
.type
= &llong_ctype
;
595 ret
.value
= LLONG_MIN
;
596 if (ptr_list_empty(rl
))
598 drange
= first_ptr_list((struct ptr_list
*)rl
);
602 sval_t
rl_max(struct range_list
*rl
)
604 struct data_range
*drange
;
607 ret
.type
= &llong_ctype
;
608 ret
.value
= LLONG_MAX
;
609 if (ptr_list_empty(rl
))
611 drange
= last_ptr_list((struct ptr_list
*)rl
);
615 int rl_to_sval(struct range_list
*rl
, sval_t
*sval
)
624 if (sval_cmp(min
, max
) != 0)
630 struct symbol
*rl_type(struct range_list
*rl
)
634 return rl_min(rl
).type
;
637 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
639 struct data_range
*ret
;
642 ret
= __alloc_perm_data_range(0);
644 ret
= __alloc_data_range(0);
650 struct data_range
*alloc_range(sval_t min
, sval_t max
)
652 return alloc_range_helper_sval(min
, max
, 0);
655 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
657 return alloc_range_helper_sval(min
, max
, 1);
660 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
662 struct range_list
*rl
= NULL
;
664 if (sval_cmp(min
, max
) > 0)
665 return alloc_whole_rl(min
.type
);
667 add_range(&rl
, min
, max
);
671 struct range_list
*alloc_whole_rl(struct symbol
*type
)
673 if (!type
|| type_positive_bits(type
) < 0)
675 if (type
->type
== SYM_ARRAY
)
678 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
681 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
683 struct data_range
*tmp
;
684 struct data_range
*new = NULL
;
688 * There is at least on valid reason why the types might be confusing
689 * and that's when you have a void pointer and on some paths you treat
690 * it as a u8 pointer and on other paths you treat it as a u16 pointer.
691 * This case is hard to deal with.
693 * There are other cases where we probably should be more specific about
694 * the types than we are. For example, we end up merging a lot of ulong
695 * with pointers and I have not figured out why we do that.
697 * But this hack works for both cases, I think. We cast it to pointers
698 * or we use the bigger size.
701 if (*list
&& rl_type(*list
) != min
.type
) {
702 if (rl_type(*list
)->type
== SYM_PTR
) {
703 min
= sval_cast(rl_type(*list
), min
);
704 max
= sval_cast(rl_type(*list
), max
);
705 } else if (min
.type
->type
== SYM_PTR
) {
706 *list
= cast_rl_private(min
.type
, *list
);
707 } else if (type_bits(rl_type(*list
)) >= type_bits(min
.type
)) {
708 min
= sval_cast(rl_type(*list
), min
);
709 max
= sval_cast(rl_type(*list
), max
);
711 *list
= cast_rl_private(min
.type
, *list
);
715 if (sval_cmp(min
, max
) > 0) {
716 min
= sval_type_min(min
.type
);
717 max
= sval_type_max(min
.type
);
721 * FIXME: This has a problem merging a range_list like: min-0,3-max
722 * with a range like 1-2. You end up with min-2,3-max instead of
725 FOR_EACH_PTR(*list
, tmp
) {
727 /* Sometimes we overlap with more than one range
728 so we have to delete or modify the next range. */
729 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
730 /* join 2 ranges here */
732 DELETE_CURRENT_PTR(tmp
);
736 /* Doesn't overlap with the next one. */
737 if (sval_cmp(max
, tmp
->min
) < 0)
740 if (sval_cmp(max
, tmp
->max
) <= 0) {
741 /* Partially overlaps the next one. */
743 DELETE_CURRENT_PTR(tmp
);
746 /* Completely overlaps the next one. */
747 DELETE_CURRENT_PTR(tmp
);
748 /* there could be more ranges to delete */
752 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
753 /* join 2 ranges into a big range */
754 new = alloc_range(min
, tmp
->max
);
755 REPLACE_CURRENT_PTR(tmp
, new);
758 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
759 new = alloc_range(min
, max
);
760 INSERT_CURRENT(new, tmp
);
763 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
764 if (sval_cmp(max
, tmp
->max
) < 0)
768 new = alloc_range(min
, max
);
769 REPLACE_CURRENT_PTR(tmp
, new);
774 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
776 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
778 new = alloc_range(min
, max
);
779 REPLACE_CURRENT_PTR(tmp
, new);
783 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
784 /* join 2 ranges into a big range */
785 new = alloc_range(tmp
->min
, max
);
786 REPLACE_CURRENT_PTR(tmp
, new);
790 /* the new range is entirely above the existing ranges */
791 } END_FOR_EACH_PTR(tmp
);
794 new = alloc_range(min
, max
);
795 add_ptr_list(list
, new);
798 struct range_list
*clone_rl(struct range_list
*list
)
800 struct data_range
*tmp
;
801 struct range_list
*ret
= NULL
;
803 FOR_EACH_PTR(list
, tmp
) {
804 add_ptr_list(&ret
, tmp
);
805 } END_FOR_EACH_PTR(tmp
);
810 struct range_list
*clone_rl_permanent(struct range_list
*list
)
812 struct data_range
*tmp
;
813 struct data_range
*new;
814 struct range_list
*ret
= NULL
;
816 FOR_EACH_PTR(list
, tmp
) {
817 new = alloc_range_perm(tmp
->min
, tmp
->max
);
818 add_ptr_list(&ret
, new);
819 } END_FOR_EACH_PTR(tmp
);
823 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
825 struct data_range
*tmp
;
826 struct range_list
*ret
= NULL
;
828 FOR_EACH_PTR(one
, tmp
) {
829 add_range(&ret
, tmp
->min
, tmp
->max
);
830 } END_FOR_EACH_PTR(tmp
);
831 FOR_EACH_PTR(two
, tmp
) {
832 add_range(&ret
, tmp
->min
, tmp
->max
);
833 } END_FOR_EACH_PTR(tmp
);
838 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
840 struct data_range
*tmp
;
841 struct range_list
*ret
= NULL
;
846 min
= sval_cast(rl_type(list
), min
);
847 max
= sval_cast(rl_type(list
), max
);
848 if (sval_cmp(min
, max
) > 0) {
854 FOR_EACH_PTR(list
, tmp
) {
855 if (sval_cmp(tmp
->max
, min
) < 0) {
856 add_range(&ret
, tmp
->min
, tmp
->max
);
859 if (sval_cmp(tmp
->min
, max
) > 0) {
860 add_range(&ret
, tmp
->min
, tmp
->max
);
863 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
865 if (sval_cmp(tmp
->min
, min
) >= 0) {
867 add_range(&ret
, max
, tmp
->max
);
868 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
870 add_range(&ret
, tmp
->min
, min
);
874 add_range(&ret
, tmp
->min
, min
);
875 add_range(&ret
, max
, tmp
->max
);
877 } END_FOR_EACH_PTR(tmp
);
882 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
888 if (sval_cmp(one
->min
, two
->min
) != 0)
890 if (sval_cmp(one
->max
, two
->max
) != 0)
895 int rl_equiv(struct range_list
*one
, struct range_list
*two
)
897 struct data_range
*one_range
;
898 struct data_range
*two_range
;
903 PREPARE_PTR_LIST(one
, one_range
);
904 PREPARE_PTR_LIST(two
, two_range
);
906 if (!one_range
&& !two_range
)
908 if (!ranges_equiv(one_range
, two_range
))
910 NEXT_PTR_LIST(one_range
);
911 NEXT_PTR_LIST(two_range
);
913 FINISH_PTR_LIST(two_range
);
914 FINISH_PTR_LIST(one_range
);
919 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
921 switch (comparison
) {
923 case SPECIAL_UNSIGNED_LT
:
924 if (sval_cmp(left
->min
, right
->max
) < 0)
927 case SPECIAL_UNSIGNED_LTE
:
929 if (sval_cmp(left
->min
, right
->max
) <= 0)
933 if (sval_cmp(left
->max
, right
->min
) < 0)
935 if (sval_cmp(left
->min
, right
->max
) > 0)
938 case SPECIAL_UNSIGNED_GTE
:
940 if (sval_cmp(left
->max
, right
->min
) >= 0)
944 case SPECIAL_UNSIGNED_GT
:
945 if (sval_cmp(left
->max
, right
->min
) > 0)
948 case SPECIAL_NOTEQUAL
:
949 if (sval_cmp(left
->min
, left
->max
) != 0)
951 if (sval_cmp(right
->min
, right
->max
) != 0)
953 if (sval_cmp(left
->min
, right
->min
) != 0)
957 sm_msg("unhandled comparison %d\n", comparison
);
963 int true_comparison_range_LR(int comparison
, struct data_range
*var
, struct data_range
*val
, int left
)
966 return true_comparison_range(var
, comparison
, val
);
968 return true_comparison_range(val
, comparison
, var
);
971 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
973 switch (comparison
) {
975 case SPECIAL_UNSIGNED_LT
:
976 if (sval_cmp(left
->max
, right
->min
) >= 0)
979 case SPECIAL_UNSIGNED_LTE
:
981 if (sval_cmp(left
->max
, right
->min
) > 0)
985 if (sval_cmp(left
->min
, left
->max
) != 0)
987 if (sval_cmp(right
->min
, right
->max
) != 0)
989 if (sval_cmp(left
->min
, right
->min
) != 0)
992 case SPECIAL_UNSIGNED_GTE
:
994 if (sval_cmp(left
->min
, right
->max
) < 0)
998 case SPECIAL_UNSIGNED_GT
:
999 if (sval_cmp(left
->min
, right
->max
) <= 0)
1002 case SPECIAL_NOTEQUAL
:
1003 if (sval_cmp(left
->max
, right
->min
) < 0)
1005 if (sval_cmp(left
->min
, right
->max
) > 0)
1009 sm_msg("unhandled comparison %d\n", comparison
);
1015 int false_comparison_range_LR(int comparison
, struct data_range
*var
, struct data_range
*val
, int left
)
1018 return false_comparison_range_sval(var
, comparison
, val
);
1020 return false_comparison_range_sval(val
, comparison
, var
);
1023 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
1025 struct range_list
*rl_left
, *rl_right
;
1026 struct data_range
*tmp_left
, *tmp_right
;
1027 struct symbol
*type
;
1029 if (!get_implied_rl(left
, &rl_left
))
1031 if (!get_implied_rl(right
, &rl_right
))
1034 type
= rl_type(rl_left
);
1035 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1036 type
= rl_type(rl_right
);
1037 if (type_positive_bits(type
) < 31)
1040 rl_left
= cast_rl(type
, rl_left
);
1041 rl_right
= cast_rl(type
, rl_right
);
1043 FOR_EACH_PTR(rl_left
, tmp_left
) {
1044 FOR_EACH_PTR(rl_right
, tmp_right
) {
1045 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
1047 } END_FOR_EACH_PTR(tmp_right
);
1048 } END_FOR_EACH_PTR(tmp_left
);
1052 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
1054 struct range_list
*rl_left
, *rl_right
;
1055 struct data_range
*tmp_left
, *tmp_right
;
1056 struct symbol
*type
;
1058 if (!get_implied_rl(left
, &rl_left
))
1060 if (!get_implied_rl(right
, &rl_right
))
1063 type
= rl_type(rl_left
);
1064 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1065 type
= rl_type(rl_right
);
1066 if (type_positive_bits(type
) < 31)
1069 rl_left
= cast_rl(type
, rl_left
);
1070 rl_right
= cast_rl(type
, rl_right
);
1072 FOR_EACH_PTR(rl_left
, tmp_left
) {
1073 FOR_EACH_PTR(rl_right
, tmp_right
) {
1074 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
1076 } END_FOR_EACH_PTR(tmp_right
);
1077 } END_FOR_EACH_PTR(tmp_left
);
1081 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1083 struct data_range
*left_tmp
, *right_tmp
;
1084 struct symbol
*type
;
1086 if (!left_ranges
|| !right_ranges
)
1089 type
= rl_type(left_ranges
);
1090 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1091 type
= rl_type(right_ranges
);
1092 if (type_positive_bits(type
) < 31)
1095 left_ranges
= cast_rl(type
, left_ranges
);
1096 right_ranges
= cast_rl(type
, right_ranges
);
1098 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1099 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1100 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
1102 } END_FOR_EACH_PTR(right_tmp
);
1103 } END_FOR_EACH_PTR(left_tmp
);
1107 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1109 struct data_range
*left_tmp
, *right_tmp
;
1110 struct symbol
*type
;
1112 if (!left_ranges
|| !right_ranges
)
1115 type
= rl_type(left_ranges
);
1116 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1117 type
= rl_type(right_ranges
);
1118 if (type_positive_bits(type
) < 31)
1121 left_ranges
= cast_rl(type
, left_ranges
);
1122 right_ranges
= cast_rl(type
, right_ranges
);
1124 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1125 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1126 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
1128 } END_FOR_EACH_PTR(right_tmp
);
1129 } END_FOR_EACH_PTR(left_tmp
);
1133 /* FIXME: the _rl here stands for right left so really it should be _lr */
1134 int possibly_true_rl_LR(int comparison
, struct range_list
*a
, struct range_list
*b
, int left
)
1137 return possibly_true_rl(a
, comparison
, b
);
1139 return possibly_true_rl(b
, comparison
, a
);
1142 int possibly_false_rl_LR(int comparison
, struct range_list
*a
, struct range_list
*b
, int left
)
1145 return possibly_false_rl(a
, comparison
, b
);
1147 return possibly_false_rl(b
, comparison
, a
);
1150 int rl_has_sval(struct range_list
*rl
, sval_t sval
)
1152 struct data_range
*tmp
;
1154 FOR_EACH_PTR(rl
, tmp
) {
1155 if (sval_cmp(tmp
->min
, sval
) <= 0 &&
1156 sval_cmp(tmp
->max
, sval
) >= 0)
1158 } END_FOR_EACH_PTR(tmp
);
1162 void tack_on(struct range_list
**list
, struct data_range
*drange
)
1164 add_ptr_list(list
, drange
);
1167 void push_rl(struct range_list_stack
**rl_stack
, struct range_list
*rl
)
1169 add_ptr_list(rl_stack
, rl
);
1172 struct range_list
*pop_rl(struct range_list_stack
**rl_stack
)
1174 struct range_list
*rl
;
1176 rl
= last_ptr_list((struct ptr_list
*)*rl_stack
);
1177 delete_ptr_list_last((struct ptr_list
**)rl_stack
);
1181 struct range_list
*top_rl(struct range_list_stack
*rl_stack
)
1183 struct range_list
*rl
;
1185 rl
= last_ptr_list((struct ptr_list
*)rl_stack
);
1189 void filter_top_rl(struct range_list_stack
**rl_stack
, struct range_list
*filter
)
1191 struct range_list
*rl
;
1193 rl
= pop_rl(rl_stack
);
1194 rl
= rl_filter(rl
, filter
);
1195 push_rl(rl_stack
, rl
);
1198 struct range_list
*rl_truncate_cast(struct symbol
*type
, struct range_list
*rl
)
1200 struct data_range
*tmp
;
1201 struct range_list
*ret
= NULL
;
1207 if (!type
|| type
== rl_type(rl
))
1210 FOR_EACH_PTR(rl
, tmp
) {
1213 if (type_bits(type
) < type_bits(rl_type(rl
))) {
1214 min
.uvalue
= tmp
->min
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1215 max
.uvalue
= tmp
->max
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1217 if (sval_cmp(min
, max
) > 0) {
1218 min
= sval_cast(type
, min
);
1219 max
= sval_cast(type
, max
);
1221 add_range_t(type
, &ret
, min
, max
);
1222 } END_FOR_EACH_PTR(tmp
);
1228 static int rl_is_sane(struct range_list
*rl
)
1230 struct data_range
*tmp
;
1231 struct symbol
*type
;
1234 FOR_EACH_PTR(rl
, tmp
) {
1235 if (!sval_fits(type
, tmp
->min
))
1237 if (!sval_fits(type
, tmp
->max
))
1239 if (sval_cmp(tmp
->min
, tmp
->max
) > 0)
1241 } END_FOR_EACH_PTR(tmp
);
1246 static int rl_type_consistent(struct range_list
*rl
)
1248 struct data_range
*tmp
;
1249 struct symbol
*type
;
1252 FOR_EACH_PTR(rl
, tmp
) {
1253 if (type
!= tmp
->min
.type
|| type
!= tmp
->max
.type
)
1255 } END_FOR_EACH_PTR(tmp
);
1259 static struct range_list
*cast_to_bool(struct range_list
*rl
)
1261 struct data_range
*tmp
;
1262 struct range_list
*ret
= NULL
;
1265 sval_t min
= { .type
= &bool_ctype
};
1266 sval_t max
= { .type
= &bool_ctype
};
1268 FOR_EACH_PTR(rl
, tmp
) {
1269 if (tmp
->min
.value
|| tmp
->max
.value
)
1271 if (sval_is_negative(tmp
->min
) &&
1272 sval_is_negative(tmp
->max
))
1274 if (tmp
->min
.value
== 0 ||
1275 tmp
->max
.value
== 0)
1277 if (sval_is_negative(tmp
->min
) &&
1280 } END_FOR_EACH_PTR(tmp
);
1287 add_range(&ret
, min
, max
);
1291 static struct range_list
*cast_rl_helper(struct symbol
*type
, struct range_list
*rl
)
1293 struct data_range
*tmp
;
1294 struct range_list
*ret
= NULL
;
1301 if (!rl_is_sane(rl
))
1302 return alloc_whole_rl(type
);
1303 if (type
== rl_type(rl
) && rl_type_consistent(rl
))
1306 if (type
== &bool_ctype
)
1307 return cast_to_bool(rl
);
1309 FOR_EACH_PTR(rl
, tmp
) {
1310 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
1311 } END_FOR_EACH_PTR(tmp
);
1314 return alloc_whole_rl(type
);
1319 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
1321 struct range_list
*ret
;
1323 ret
= cast_rl_helper(type
, rl
);
1329 static struct range_list
*cast_rl_private(struct symbol
*type
, struct range_list
*rl
)
1331 struct range_list
*ret
;
1333 ret
= cast_rl_helper(type
, rl
);
1337 struct range_list
*rl_invert(struct range_list
*orig
)
1339 struct range_list
*ret
= NULL
;
1340 struct data_range
*tmp
;
1341 sval_t gap_min
, abs_max
, sval
;
1345 if (type_bits(rl_type(orig
)) < 0) /* void type mostly */
1348 gap_min
= sval_type_min(rl_min(orig
).type
);
1349 abs_max
= sval_type_max(rl_max(orig
).type
);
1351 FOR_EACH_PTR(orig
, tmp
) {
1352 if (sval_cmp(tmp
->min
, gap_min
) > 0) {
1353 sval
= sval_type_val(tmp
->min
.type
, tmp
->min
.value
- 1);
1354 add_range(&ret
, gap_min
, sval
);
1356 if (sval_cmp(tmp
->max
, abs_max
) == 0) {
1360 gap_min
= sval_type_val(tmp
->max
.type
, tmp
->max
.value
+ 1);
1361 } END_FOR_EACH_PTR(tmp
);
1363 if (sval_cmp(gap_min
, abs_max
) <= 0)
1364 add_range(&ret
, gap_min
, abs_max
);
1370 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
1372 struct data_range
*tmp
;
1374 FOR_EACH_PTR(filter
, tmp
) {
1375 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
1376 } END_FOR_EACH_PTR(tmp
);
1381 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
1383 struct range_list
*one_orig
;
1384 struct range_list
*two_orig
;
1385 struct range_list
*ret
;
1386 struct symbol
*ret_type
;
1387 struct symbol
*small_type
;
1388 struct symbol
*large_type
;
1398 ret_type
= rl_type(one
);
1399 small_type
= rl_type(one
);
1400 large_type
= rl_type(two
);
1402 if (type_bits(rl_type(two
)) < type_bits(small_type
)) {
1403 small_type
= rl_type(two
);
1404 large_type
= rl_type(one
);
1407 one
= cast_rl(large_type
, one
);
1408 two
= cast_rl(large_type
, two
);
1411 one
= rl_invert(one
);
1412 two
= rl_invert(two
);
1414 ret
= rl_filter(ret
, one
);
1415 ret
= rl_filter(ret
, two
);
1417 one
= cast_rl(small_type
, one_orig
);
1418 two
= cast_rl(small_type
, two_orig
);
1420 one
= rl_invert(one
);
1421 two
= rl_invert(two
);
1423 ret
= cast_rl(small_type
, ret
);
1424 ret
= rl_filter(ret
, one
);
1425 ret
= rl_filter(ret
, two
);
1427 return cast_rl(ret_type
, ret
);
1430 static struct range_list
*handle_mod_rl(struct range_list
*left
, struct range_list
*right
)
1435 max
= rl_max(right
);
1436 if (sval_is_max(max
))
1441 if (sval_is_negative(max
))
1443 if (sval_cmp(rl_max(left
), max
) < 0)
1447 return alloc_rl(zero
, max
);
1450 static struct range_list
*get_neg_rl(struct range_list
*rl
)
1452 struct data_range
*tmp
;
1453 struct data_range
*new;
1454 struct range_list
*ret
= NULL
;
1458 if (sval_is_positive(rl_min(rl
)))
1461 FOR_EACH_PTR(rl
, tmp
) {
1462 if (sval_is_positive(tmp
->min
))
1464 if (sval_is_positive(tmp
->max
)) {
1465 new = alloc_range(tmp
->min
, tmp
->max
);
1466 new->max
.value
= -1;
1467 add_range(&ret
, new->min
, new->max
);
1470 add_range(&ret
, tmp
->min
, tmp
->max
);
1471 } END_FOR_EACH_PTR(tmp
);
1477 static struct range_list
*get_pos_rl(struct range_list
*rl
)
1479 struct data_range
*tmp
;
1480 struct data_range
*new;
1481 struct range_list
*ret
= NULL
;
1485 if (sval_is_negative(rl_max(rl
)))
1488 FOR_EACH_PTR(rl
, tmp
) {
1489 if (sval_is_negative(tmp
->max
))
1491 if (sval_is_positive(tmp
->min
)) {
1492 add_range(&ret
, tmp
->min
, tmp
->max
);
1495 new = alloc_range(tmp
->min
, tmp
->max
);
1497 add_range(&ret
, new->min
, new->max
);
1498 } END_FOR_EACH_PTR(tmp
);
1504 static struct range_list
*divide_rl_helper(struct range_list
*left
, struct range_list
*right
)
1506 sval_t right_min
, right_max
;
1509 if (!left
|| !right
)
1512 /* let's assume we never divide by zero */
1513 right_min
= rl_min(right
);
1514 right_max
= rl_max(right
);
1515 if (right_min
.value
== 0 && right_max
.value
== 0)
1517 if (right_min
.value
== 0)
1518 right_min
.value
= 1;
1519 if (right_max
.value
== 0)
1520 right_max
.value
= -1;
1522 max
= sval_binop(rl_max(left
), '/', right_min
);
1523 min
= sval_binop(rl_min(left
), '/', right_max
);
1525 return alloc_rl(min
, max
);
1528 static struct range_list
*handle_divide_rl(struct range_list
*left
, struct range_list
*right
)
1530 struct range_list
*left_neg
, *left_pos
, *right_neg
, *right_pos
;
1531 struct range_list
*neg_neg
, *neg_pos
, *pos_neg
, *pos_pos
;
1532 struct range_list
*ret
;
1534 if (is_whole_rl(right
))
1537 left_neg
= get_neg_rl(left
);
1538 left_pos
= get_pos_rl(left
);
1539 right_neg
= get_neg_rl(right
);
1540 right_pos
= get_pos_rl(right
);
1542 neg_neg
= divide_rl_helper(left_neg
, right_neg
);
1543 neg_pos
= divide_rl_helper(left_neg
, right_pos
);
1544 pos_neg
= divide_rl_helper(left_pos
, right_neg
);
1545 pos_pos
= divide_rl_helper(left_pos
, right_pos
);
1547 ret
= rl_union(neg_neg
, neg_pos
);
1548 ret
= rl_union(ret
, pos_neg
);
1549 return rl_union(ret
, pos_pos
);
1552 static struct range_list
*handle_add_mult_rl(struct range_list
*left
, int op
, struct range_list
*right
)
1556 if (sval_binop_overflows(rl_min(left
), op
, rl_min(right
)))
1558 min
= sval_binop(rl_min(left
), op
, rl_min(right
));
1560 if (sval_binop_overflows(rl_max(left
), op
, rl_max(right
)))
1562 max
= sval_binop(rl_max(left
), op
, rl_max(right
));
1564 return alloc_rl(min
, max
);
1567 static struct range_list
*handle_sub_rl(struct range_list
*left_orig
, struct range_list
*right_orig
)
1569 struct range_list
*left_rl
, *right_rl
;
1570 struct symbol
*type
;
1572 sval_t min_ll
, max_ll
, res_ll
;
1575 /* TODO: These things should totally be using dranges where possible */
1577 if (!left_orig
|| !right_orig
)
1581 if (type_positive_bits(rl_type(left_orig
)) > type_positive_bits(type
))
1582 type
= rl_type(left_orig
);
1583 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
1584 type
= rl_type(right_orig
);
1586 left_rl
= cast_rl(type
, left_orig
);
1587 right_rl
= cast_rl(type
, right_orig
);
1589 max
= rl_max(left_rl
);
1590 min
= sval_type_min(type
);
1592 min_ll
= rl_min(left_rl
);
1593 min_ll
.type
= &llong_ctype
;
1594 max_ll
= rl_max(right_rl
);
1595 max_ll
.type
= &llong_ctype
;
1597 res_ll
.value
= min_ll
.value
- max_ll
.value
;
1599 if (!sval_binop_overflows(rl_min(left_rl
), '-', rl_max(right_rl
))) {
1600 tmp
= sval_binop(rl_min(left_rl
), '-', rl_max(right_rl
));
1601 if (sval_cmp(tmp
, min
) > 0)
1603 } else if (type_positive_bits(type
) < 63 &&
1604 !sval_binop_overflows(min_ll
, '-', max_ll
) &&
1605 (min
.value
!= 0 && sval_cmp(res_ll
, min
) >= 0)) {
1606 struct range_list
*left_casted
, *right_casted
, *result
;
1608 left_casted
= cast_rl(&llong_ctype
, left_orig
);
1609 right_casted
= cast_rl(&llong_ctype
, right_orig
);
1610 result
= handle_sub_rl(left_casted
, right_casted
);
1611 return cast_rl(type
, result
);
1614 if (!sval_is_max(rl_max(left_rl
))) {
1615 tmp
= sval_binop(rl_max(left_rl
), '-', rl_min(right_rl
));
1616 if (sval_cmp(tmp
, max
) < 0)
1620 if (sval_is_min(min
) && sval_is_max(max
))
1623 return alloc_rl(min
, max
);
/* Bit mask derived from the lowest value in the range list. */
static unsigned long long rl_bits_always_set(struct range_list *rl)
{
	return sval_fls_mask(rl_min(rl));
}
/* Bit mask derived from the highest value in the range list. */
static unsigned long long rl_bits_maybe_set(struct range_list *rl)
{
	return sval_fls_mask(rl_max(rl));
}
1636 static struct range_list
*handle_OR_rl(struct range_list
*left
, struct range_list
*right
)
1638 unsigned long long left_min
, left_max
, right_min
, right_max
;
1642 if ((rl_to_sval(left
, &sval
) || rl_to_sval(right
, &sval
)) &&
1643 !sval_binop_overflows(rl_max(left
), '+', rl_max(right
)))
1644 return rl_binop(left
, '+', right
);
1646 left_min
= rl_bits_always_set(left
);
1647 left_max
= rl_bits_maybe_set(left
);
1648 right_min
= rl_bits_always_set(right
);
1649 right_max
= rl_bits_maybe_set(right
);
1651 min
.type
= max
.type
= &ullong_ctype
;
1652 min
.uvalue
= left_min
| right_min
;
1653 max
.uvalue
= left_max
| right_max
;
1655 return cast_rl(rl_type(left
), alloc_rl(min
, max
));
1658 static struct range_list
*handle_XOR_rl(struct range_list
*left
, struct range_list
*right
)
1660 unsigned long long left_set
, left_maybe
;
1661 unsigned long long right_set
, right_maybe
;
1664 left_set
= rl_bits_always_set(left
);
1665 left_maybe
= rl_bits_maybe_set(left
);
1667 right_set
= rl_bits_always_set(right
);
1668 right_maybe
= rl_bits_maybe_set(right
);
1670 zero
= max
= rl_min(left
);
1672 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1674 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1677 static struct range_list
*handle_AND_rl(struct range_list
*left
, struct range_list
*right
)
1679 unsigned long long left_set
, left_maybe
;
1680 unsigned long long right_set
, right_maybe
;
1685 left_set
= rl_bits_always_set(left
);
1686 left_maybe
= rl_bits_maybe_set(left
);
1688 right_set
= rl_bits_always_set(right
);
1689 right_maybe
= rl_bits_maybe_set(right
);
1691 zero
= max
= rl_min(left
);
1693 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1695 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1698 struct range_list
*rl_binop(struct range_list
*left
, int op
, struct range_list
*right
)
1700 struct symbol
*cast_type
;
1701 sval_t left_sval
, right_sval
;
1702 struct range_list
*ret
= NULL
;
1704 cast_type
= rl_type(left
);
1705 if (sval_type_max(rl_type(left
)).uvalue
< sval_type_max(rl_type(right
)).uvalue
)
1706 cast_type
= rl_type(right
);
1707 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
1708 cast_type
= &int_ctype
;
1710 left
= cast_rl(cast_type
, left
);
1711 right
= cast_rl(cast_type
, right
);
1713 if (!left
&& !right
)
1716 if (rl_to_sval(left
, &left_sval
) && rl_to_sval(right
, &right_sval
)) {
1717 sval_t val
= sval_binop(left_sval
, op
, right_sval
);
1718 return alloc_rl(val
, val
);
1723 ret
= handle_mod_rl(left
, right
);
1726 ret
= handle_divide_rl(left
, right
);
1730 ret
= handle_add_mult_rl(left
, op
, right
);
1733 ret
= handle_OR_rl(left
, right
);
1736 ret
= handle_XOR_rl(left
, right
);
1739 ret
= handle_AND_rl(left
, right
);
1742 ret
= handle_sub_rl(left
, right
);
1744 /* FIXME: Do the rest as well */
1745 case SPECIAL_RIGHTSHIFT
:
1746 case SPECIAL_LEFTSHIFT
:
1753 void free_data_info_allocs(void)
1755 struct allocator_struct
*desc
= &data_info_allocator
;
1756 struct allocation_blob
*blob
= desc
->blobs
;
1761 desc
->allocations
= 0;
1762 desc
->total_bytes
= 0;
1763 desc
->useful_bytes
= 0;
1764 desc
->freelist
= NULL
;
1766 struct allocation_blob
*next
= blob
->next
;
1767 blob_free(blob
, desc
->chunking
);
1770 clear_data_range_alloc();
1773 void split_comparison_rl(struct range_list
*left_orig
, int op
, struct range_list
*right_orig
,
1774 struct range_list
**left_true_rl
, struct range_list
**left_false_rl
,
1775 struct range_list
**right_true_rl
, struct range_list
**right_false_rl
)
1777 struct range_list
*left_true
, *left_false
;
1778 struct range_list
*right_true
, *right_false
;
1781 min
= sval_type_min(rl_type(left_orig
));
1782 max
= sval_type_max(rl_type(left_orig
));
1784 left_true
= clone_rl(left_orig
);
1785 left_false
= clone_rl(left_orig
);
1786 right_true
= clone_rl(right_orig
);
1787 right_false
= clone_rl(right_orig
);
1791 case SPECIAL_UNSIGNED_LT
:
1792 left_true
= remove_range(left_orig
, rl_max(right_orig
), max
);
1793 if (!sval_is_min(rl_min(right_orig
))) {
1794 left_false
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
1797 right_true
= remove_range(right_orig
, min
, rl_min(left_orig
));
1798 if (!sval_is_max(rl_max(left_orig
)))
1799 right_false
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
1801 case SPECIAL_UNSIGNED_LTE
:
1803 if (!sval_is_max(rl_max(right_orig
)))
1804 left_true
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
1805 left_false
= remove_range(left_orig
, min
, rl_min(right_orig
));
1807 if (!sval_is_min(rl_min(left_orig
)))
1808 right_true
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
1809 right_false
= remove_range(right_orig
, rl_max(left_orig
), max
);
1811 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
1812 left_false
= remove_range(left_false
, rl_min(left_orig
), rl_min(left_orig
));
1813 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
1814 right_false
= remove_range(right_false
, rl_max(left_orig
), rl_max(left_orig
));
1817 if (!sval_is_max(rl_max(right_orig
))) {
1818 left_true
= remove_range(left_true
, add_one(rl_max(right_orig
)), max
);
1820 if (!sval_is_min(rl_min(right_orig
))) {
1821 left_true
= remove_range(left_true
, min
, sub_one(rl_min(right_orig
)));
1823 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
1824 left_false
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
1826 if (!sval_is_max(rl_max(left_orig
)))
1827 right_true
= remove_range(right_true
, add_one(rl_max(left_orig
)), max
);
1828 if (!sval_is_min(rl_min(left_orig
)))
1829 right_true
= remove_range(right_true
, min
, sub_one(rl_min(left_orig
)));
1830 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
1831 right_false
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
1833 case SPECIAL_UNSIGNED_GTE
:
1835 if (!sval_is_min(rl_min(right_orig
)))
1836 left_true
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
1837 left_false
= remove_range(left_orig
, rl_max(right_orig
), max
);
1839 if (!sval_is_max(rl_max(left_orig
)))
1840 right_true
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
1841 right_false
= remove_range(right_orig
, min
, rl_min(left_orig
));
1843 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
1844 right_false
= remove_range(right_false
, rl_min(left_orig
), rl_min(left_orig
));
1845 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
1846 left_false
= remove_range(left_false
, rl_max(left_orig
), rl_max(left_orig
));
1849 case SPECIAL_UNSIGNED_GT
:
1850 left_true
= remove_range(left_orig
, min
, rl_min(right_orig
));
1851 if (!sval_is_max(rl_max(right_orig
)))
1852 left_false
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
1854 right_true
= remove_range(right_orig
, rl_max(left_orig
), max
);
1855 if (!sval_is_min(rl_min(left_orig
)))
1856 right_false
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
1858 case SPECIAL_NOTEQUAL
:
1859 if (!sval_is_max(rl_max(right_orig
)))
1860 left_false
= remove_range(left_false
, add_one(rl_max(right_orig
)), max
);
1861 if (!sval_is_min(rl_min(right_orig
)))
1862 left_false
= remove_range(left_false
, min
, sub_one(rl_min(right_orig
)));
1863 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
1864 left_true
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
1866 if (!sval_is_max(rl_max(left_orig
)))
1867 right_false
= remove_range(right_false
, add_one(rl_max(left_orig
)), max
);
1868 if (!sval_is_min(rl_min(left_orig
)))
1869 right_false
= remove_range(right_false
, min
, sub_one(rl_min(left_orig
)));
1870 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
1871 right_true
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
1874 sm_msg("internal error: unhandled comparison %d", op
);
1879 *left_true_rl
= left_true
;
1880 *left_false_rl
= left_false
;
1882 if (right_true_rl
) {
1883 *right_true_rl
= right_true
;
1884 *right_false_rl
= right_false
;