2 * Copyright (C) 2009 Dan Carpenter.
4 * This program is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU General Public License
6 * as published by the Free Software Foundation; either version 2
7 * of the License, or (at your option) any later version.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
20 #include "smatch_extra.h"
21 #include "smatch_slist.h"
23 ALLOCATOR(data_info
, "smatch extra data");
24 ALLOCATOR(data_range
, "data range");
25 __DO_ALLOCATOR(struct data_range
, sizeof(struct data_range
), __alignof__(struct data_range
),
26 "permanent ranges", perm_data_range
);
27 __DECLARE_ALLOCATOR(struct ptr_list
, rl_ptrlist
);
29 char *show_rl(struct range_list
*list
)
31 struct data_range
*tmp
;
36 full
[sizeof(full
) - 1] = '\0';
37 FOR_EACH_PTR(list
, tmp
) {
39 strncat(full
, ",", 254 - strlen(full
));
40 if (sval_cmp(tmp
->min
, tmp
->max
) == 0) {
41 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
44 strncat(full
, sval_to_str(tmp
->min
), 254 - strlen(full
));
45 strncat(full
, "-", 254 - strlen(full
));
46 strncat(full
, sval_to_str(tmp
->max
), 254 - strlen(full
));
47 } END_FOR_EACH_PTR(tmp
);
48 if (strlen(full
) == sizeof(full
) - 1)
49 full
[sizeof(full
) - 2] = '+';
50 return alloc_sname(full
);
/* Release every range_list node allocated through the rl_ptrlist allocator. */
void free_all_rl(void)
{
	clear_rl_ptrlist_alloc();
}
58 static int sval_too_big(struct symbol
*type
, sval_t sval
)
60 if (type_bits(type
) >= 32 &&
61 type_bits(sval
.type
) <= type_bits(type
))
63 if (sval
.uvalue
<= ((1ULL << type_bits(type
)) - 1))
65 if (type_signed(sval
.type
)) {
66 if (type_unsigned(type
)) {
67 unsigned long long neg
= ~sval
.uvalue
;
68 if (neg
<= sval_type_max(type
).uvalue
)
71 if (sval
.value
< sval_type_min(type
).value
)
73 if (sval
.value
> sval_type_max(type
).value
)
77 if (sval
.uvalue
> sval_type_max(type
).uvalue
)
82 static void add_range_t(struct symbol
*type
, struct range_list
**rl
, sval_t min
, sval_t max
)
84 /* If we're just adding a number, cast it and add it */
85 if (sval_cmp(min
, max
) == 0) {
86 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
90 /* If the range is within the type range then add it */
91 if (sval_fits(type
, min
) && sval_fits(type
, max
)) {
92 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
97 * If the range we are adding has more bits than the range type then
98 * add the whole range type. Eg:
99 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
100 * This isn't totally the right thing to do. We could be more granular.
102 if (sval_too_big(type
, min
) || sval_too_big(type
, max
)) {
103 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
107 /* Cast negative values to high positive values */
108 if (sval_is_negative(min
) && type_unsigned(type
)) {
109 if (sval_is_positive(max
)) {
110 if (sval_too_high(type
, max
)) {
111 add_range(rl
, sval_type_min(type
), sval_type_max(type
));
114 add_range(rl
, sval_type_val(type
, 0), sval_cast(type
, max
));
115 max
= sval_type_max(type
);
117 max
= sval_cast(type
, max
);
119 min
= sval_cast(type
, min
);
120 add_range(rl
, min
, max
);
123 /* Cast high positive numbers to negative */
124 if (sval_unsigned(max
) && sval_is_negative(sval_cast(type
, max
))) {
125 if (!sval_is_negative(sval_cast(type
, min
))) {
126 add_range(rl
, sval_cast(type
, min
), sval_type_max(type
));
127 min
= sval_type_min(type
);
129 min
= sval_cast(type
, min
);
131 max
= sval_cast(type
, max
);
132 add_range(rl
, min
, max
);
135 add_range(rl
, sval_cast(type
, min
), sval_cast(type
, max
));
139 static int str_to_comparison_arg_helper(const char *str
,
140 struct expression
*call
, int *comparison
,
141 struct expression
**arg
, char **endp
)
144 char *c
= (char *)str
;
153 *comparison
= SPECIAL_LTE
;
158 } else if (*c
== '=') {
161 *comparison
= SPECIAL_EQUAL
;
162 } else if (*c
== '>') {
165 *comparison
= SPECIAL_GTE
;
170 } else if (*c
== '!') {
173 *comparison
= SPECIAL_NOTEQUAL
;
182 param
= strtoll(c
, &c
, 10);
183 c
++; /* skip the ']' character */
189 *arg
= get_argument_from_call_expr(call
->args
, param
);
195 int str_to_comparison_arg(const char *str
, struct expression
*call
, int *comparison
, struct expression
**arg
)
204 return str_to_comparison_arg_helper(str
, call
, comparison
, arg
, NULL
);
207 static int get_val_from_key(int use_max
, struct symbol
*type
, char *c
, struct expression
*call
, char **endp
, sval_t
*sval
)
209 struct expression
*arg
;
214 ret
= sval_type_max(type
);
216 ret
= sval_type_min(type
);
218 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
)) {
223 if (use_max
&& get_implied_max(arg
, &tmp
)) {
225 if (comparison
== '<') {
227 ret
= sval_binop(ret
, '-', tmp
);
230 if (!use_max
&& get_implied_min(arg
, &tmp
)) {
232 if (comparison
== '>') {
234 ret
= sval_binop(ret
, '+', tmp
);
242 static sval_t
add_one(sval_t sval
)
248 static sval_t
sub_one(sval_t sval
)
254 void filter_by_comparison(struct range_list
**rl
, int comparison
, struct range_list
*right
)
256 struct range_list
*left_orig
= *rl
;
257 struct range_list
*right_orig
= right
;
258 struct range_list
*ret_rl
= *rl
;
259 struct symbol
*cast_type
;
262 cast_type
= rl_type(left_orig
);
263 if (sval_type_max(rl_type(left_orig
)).uvalue
< sval_type_max(rl_type(right_orig
)).uvalue
)
264 cast_type
= rl_type(right_orig
);
265 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
266 cast_type
= &int_ctype
;
268 min
= sval_type_min(cast_type
);
269 max
= sval_type_max(cast_type
);
270 left_orig
= cast_rl(cast_type
, left_orig
);
271 right_orig
= cast_rl(cast_type
, right_orig
);
273 switch (comparison
) {
275 case SPECIAL_UNSIGNED_LT
:
276 ret_rl
= remove_range(left_orig
, rl_max(right_orig
), max
);
279 case SPECIAL_UNSIGNED_LTE
:
280 if (!sval_is_max(rl_max(right_orig
)))
281 ret_rl
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
284 if (!sval_is_max(rl_max(right_orig
)))
285 ret_rl
= remove_range(ret_rl
, add_one(rl_max(right_orig
)), max
);
286 if (!sval_is_min(rl_min(right_orig
)))
287 ret_rl
= remove_range(ret_rl
, min
, sub_one(rl_min(right_orig
)));
290 case SPECIAL_UNSIGNED_GTE
:
291 if (!sval_is_min(rl_min(right_orig
)))
292 ret_rl
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
295 case SPECIAL_UNSIGNED_GT
:
296 ret_rl
= remove_range(left_orig
, min
, rl_min(right_orig
));
298 case SPECIAL_NOTEQUAL
:
299 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
300 ret_rl
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
303 sm_msg("internal error: unhandled comparison %s", show_special(comparison
));
307 *rl
= cast_rl(rl_type(*rl
), ret_rl
);
310 static struct range_list
*filter_by_comparison_call(char *c
, struct expression
*call
, char **endp
, struct range_list
*start_rl
)
313 struct expression
*arg
;
314 struct range_list
*casted_start
, *right_orig
;
317 if (!str_to_comparison_arg_helper(c
, call
, &comparison
, &arg
, endp
))
320 if (!get_implied_rl(arg
, &right_orig
))
324 if (type_positive_bits(rl_type(start_rl
)) > type_positive_bits(type
))
325 type
= rl_type(start_rl
);
326 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
327 type
= rl_type(right_orig
);
329 casted_start
= cast_rl(type
, start_rl
);
330 right_orig
= cast_rl(type
, right_orig
);
332 filter_by_comparison(&casted_start
, comparison
, right_orig
);
333 return cast_rl(rl_type(start_rl
), casted_start
);
336 static sval_t
parse_val(int use_max
, struct expression
*call
, struct symbol
*type
, char *c
, char **endp
)
341 if (!strncmp(start
, "max", 3)) {
342 ret
= sval_type_max(type
);
344 } else if (!strncmp(start
, "u64max", 6)) {
345 ret
= sval_type_val(type
, ULLONG_MAX
);
347 } else if (!strncmp(start
, "s64max", 6)) {
348 ret
= sval_type_val(type
, LLONG_MAX
);
350 } else if (!strncmp(start
, "u32max", 6)) {
351 ret
= sval_type_val(type
, UINT_MAX
);
353 } else if (!strncmp(start
, "s32max", 6)) {
354 ret
= sval_type_val(type
, INT_MAX
);
356 } else if (!strncmp(start
, "u16max", 6)) {
357 ret
= sval_type_val(type
, USHRT_MAX
);
359 } else if (!strncmp(start
, "s16max", 6)) {
360 ret
= sval_type_val(type
, SHRT_MAX
);
362 } else if (!strncmp(start
, "min", 3)) {
363 ret
= sval_type_min(type
);
365 } else if (!strncmp(start
, "s64min", 6)) {
366 ret
= sval_type_val(type
, LLONG_MIN
);
368 } else if (!strncmp(start
, "s32min", 6)) {
369 ret
= sval_type_val(type
, INT_MIN
);
371 } else if (!strncmp(start
, "s16min", 6)) {
372 ret
= sval_type_val(type
, SHRT_MIN
);
374 } else if (!strncmp(start
, "long_min", 8)) {
375 ret
= sval_type_val(type
, LONG_MIN
);
377 } else if (!strncmp(start
, "long_max", 8)) {
378 ret
= sval_type_val(type
, LONG_MAX
);
380 } else if (!strncmp(start
, "ulong_max", 9)) {
381 ret
= sval_type_val(type
, ULONG_MAX
);
383 } else if (!strncmp(start
, "ptr_max", 7)) {
384 ret
= sval_type_val(type
, valid_ptr_max
);
386 } else if (start
[0] == '[') {
387 /* this parses [==p0] comparisons */
388 get_val_from_key(1, type
, start
, call
, &c
, &ret
);
389 } else if (type_positive_bits(type
) == 64) {
390 ret
= sval_type_val(type
, strtoull(start
, &c
, 10));
392 ret
= sval_type_val(type
, strtoll(start
, &c
, 10));
398 static char *jump_to_call_math(char *value
)
402 while (*c
&& *c
!= '[')
408 if (*c
== '<' || *c
== '=' || *c
== '>' || *c
== '!')
414 static void str_to_rl_helper(struct expression
*call
, struct symbol
*type
, char *str
, char **endp
, struct range_list
**rl
)
416 struct range_list
*rl_tmp
= NULL
;
420 min
= sval_type_min(type
);
421 max
= sval_type_max(type
);
423 while (*c
!= '\0' && *c
!= '[') {
425 if (sval_cmp(min
, sval_type_min(type
)) != 0)
427 max
= sval_type_max(type
);
428 add_range_t(type
, &rl_tmp
, min
, max
);
433 min
= parse_val(0, call
, type
, c
, &c
);
434 if (!sval_fits(type
, min
))
435 min
= sval_type_min(type
);
439 if (*c
== '\0' || *c
== '[') {
440 add_range_t(type
, &rl_tmp
, min
, min
);
444 add_range_t(type
, &rl_tmp
, min
, min
);
449 min
= sval_type_max(type
);
453 sm_msg("debug XXX: trouble parsing %s c = %s", str
, c
);
459 max
= parse_val(1, call
, type
, c
, &c
);
460 if (!sval_fits(type
, max
))
461 max
= sval_type_max(type
);
463 max
= sval_type_max(type
);
466 add_range_t(type
, &rl_tmp
, min
, max
);
477 static void str_to_dinfo(struct expression
*call
, struct symbol
*type
, char *value
, struct data_info
*dinfo
)
479 struct range_list
*math_rl
;
482 struct range_list
*rl
= NULL
;
487 if (strcmp(value
, "empty") == 0)
490 if (strncmp(value
, "[==$", 4) == 0) {
491 struct expression
*arg
;
494 if (!str_to_comparison_arg(value
, call
, &comparison
, &arg
))
496 if (!get_implied_rl(arg
, &rl
))
501 str_to_rl_helper(call
, type
, value
, &c
, &rl
);
505 call_math
= jump_to_call_math(value
);
506 if (call_math
&& parse_call_math_rl(call
, call_math
, &math_rl
)) {
507 rl
= rl_intersection(rl
, math_rl
);
512 * For now if we already tried to handle the call math and couldn't
513 * figure it out then bail.
515 if (jump_to_call_math(c
) == c
+ 1)
518 rl
= filter_by_comparison_call(c
, call
, &c
, rl
);
521 rl
= cast_rl(type
, rl
);
522 dinfo
->value_ranges
= rl
;
525 void str_to_rl(struct symbol
*type
, char *value
, struct range_list
**rl
)
527 struct data_info dinfo
= {};
529 str_to_dinfo(NULL
, type
, value
, &dinfo
);
530 *rl
= dinfo
.value_ranges
;
533 void call_results_to_rl(struct expression
*expr
, struct symbol
*type
, char *value
, struct range_list
**rl
)
535 struct data_info dinfo
= {};
537 str_to_dinfo(strip_expr(expr
), type
, value
, &dinfo
);
538 *rl
= dinfo
.value_ranges
;
541 int is_whole_rl(struct range_list
*rl
)
543 struct data_range
*drange
;
545 if (ptr_list_empty(rl
))
547 drange
= first_ptr_list((struct ptr_list
*)rl
);
548 if (sval_is_min(drange
->min
) && sval_is_max(drange
->max
))
553 int is_whole_rl_non_zero(struct range_list
*rl
)
555 struct data_range
*drange
;
557 if (ptr_list_empty(rl
))
559 drange
= first_ptr_list((struct ptr_list
*)rl
);
560 if (sval_unsigned(drange
->min
) &&
561 drange
->min
.value
== 1 &&
562 sval_is_max(drange
->max
))
564 if (!sval_is_min(drange
->min
) || drange
->max
.value
!= -1)
566 drange
= last_ptr_list((struct ptr_list
*)rl
);
567 if (drange
->min
.value
!= 1 || !sval_is_max(drange
->max
))
572 sval_t
rl_min(struct range_list
*rl
)
574 struct data_range
*drange
;
577 ret
.type
= &llong_ctype
;
578 ret
.value
= LLONG_MIN
;
579 if (ptr_list_empty(rl
))
581 drange
= first_ptr_list((struct ptr_list
*)rl
);
585 sval_t
rl_max(struct range_list
*rl
)
587 struct data_range
*drange
;
590 ret
.type
= &llong_ctype
;
591 ret
.value
= LLONG_MAX
;
592 if (ptr_list_empty(rl
))
594 drange
= last_ptr_list((struct ptr_list
*)rl
);
598 int rl_to_sval(struct range_list
*rl
, sval_t
*sval
)
607 if (sval_cmp(min
, max
) != 0)
613 struct symbol
*rl_type(struct range_list
*rl
)
617 return rl_min(rl
).type
;
620 static struct data_range
*alloc_range_helper_sval(sval_t min
, sval_t max
, int perm
)
622 struct data_range
*ret
;
625 ret
= __alloc_perm_data_range(0);
627 ret
= __alloc_data_range(0);
633 struct data_range
*alloc_range(sval_t min
, sval_t max
)
635 return alloc_range_helper_sval(min
, max
, 0);
638 struct data_range
*alloc_range_perm(sval_t min
, sval_t max
)
640 return alloc_range_helper_sval(min
, max
, 1);
643 struct range_list
*alloc_rl(sval_t min
, sval_t max
)
645 struct range_list
*rl
= NULL
;
647 if (sval_cmp(min
, max
) > 0)
648 return alloc_whole_rl(min
.type
);
650 add_range(&rl
, min
, max
);
654 struct range_list
*alloc_whole_rl(struct symbol
*type
)
656 if (!type
|| type_positive_bits(type
) < 0)
658 if (type
->type
== SYM_ARRAY
)
661 return alloc_rl(sval_type_min(type
), sval_type_max(type
));
664 extern int rl_ptrlist_hack
;
665 void add_range(struct range_list
**list
, sval_t min
, sval_t max
)
667 struct data_range
*tmp
;
668 struct data_range
*new = NULL
;
672 * There is at least on valid reason why the types might be confusing
673 * and that's when you have a void pointer and on some paths you treat
674 * it as a u8 pointer and on other paths you treat it as a u16 pointer.
675 * This case is hard to deal with.
677 * There are other cases where we probably should be more specific about
678 * the types than we are. For example, we end up merging a lot of ulong
679 * with pointers and I have not figured out why we do that.
681 * But this hack works for both cases, I think. We cast it to pointers
682 * or we use the bigger size.
685 if (*list
&& rl_type(*list
) != min
.type
) {
686 if (rl_type(*list
)->type
== SYM_PTR
) {
687 min
= sval_cast(rl_type(*list
), min
);
688 max
= sval_cast(rl_type(*list
), max
);
689 } else if (min
.type
->type
== SYM_PTR
) {
690 *list
= cast_rl(min
.type
, *list
);
691 } else if (type_bits(rl_type(*list
)) >= type_bits(min
.type
)) {
692 min
= sval_cast(rl_type(*list
), min
);
693 max
= sval_cast(rl_type(*list
), max
);
695 *list
= cast_rl(min
.type
, *list
);
699 if (sval_cmp(min
, max
) > 0) {
700 min
= sval_type_min(min
.type
);
701 max
= sval_type_max(min
.type
);
705 * FIXME: This has a problem merging a range_list like: min-0,3-max
706 * with a range like 1-2. You end up with min-2,3-max instead of
709 FOR_EACH_PTR(*list
, tmp
) {
711 /* Sometimes we overlap with more than one range
712 so we have to delete or modify the next range. */
713 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
714 /* join 2 ranges here */
716 DELETE_CURRENT_PTR(tmp
);
720 /* Doesn't overlap with the next one. */
721 if (sval_cmp(max
, tmp
->min
) < 0)
724 if (sval_cmp(max
, tmp
->max
) <= 0) {
725 /* Partially overlaps the next one. */
727 DELETE_CURRENT_PTR(tmp
);
730 /* Completely overlaps the next one. */
731 DELETE_CURRENT_PTR(tmp
);
732 /* there could be more ranges to delete */
736 if (!sval_is_max(max
) && max
.value
+ 1 == tmp
->min
.value
) {
737 /* join 2 ranges into a big range */
738 new = alloc_range(min
, tmp
->max
);
739 REPLACE_CURRENT_PTR(tmp
, new);
742 if (sval_cmp(max
, tmp
->min
) < 0) { /* new range entirely below */
743 new = alloc_range(min
, max
);
744 INSERT_CURRENT(new, tmp
);
747 if (sval_cmp(min
, tmp
->min
) < 0) { /* new range partially below */
748 if (sval_cmp(max
, tmp
->max
) < 0)
752 new = alloc_range(min
, max
);
753 REPLACE_CURRENT_PTR(tmp
, new);
758 if (sval_cmp(max
, tmp
->max
) <= 0) /* new range already included */
760 if (sval_cmp(min
, tmp
->max
) <= 0) { /* new range partially above */
762 new = alloc_range(min
, max
);
763 REPLACE_CURRENT_PTR(tmp
, new);
767 if (!sval_is_min(min
) && min
.value
- 1 == tmp
->max
.value
) {
768 /* join 2 ranges into a big range */
769 new = alloc_range(tmp
->min
, max
);
770 REPLACE_CURRENT_PTR(tmp
, new);
774 /* the new range is entirely above the existing ranges */
775 } END_FOR_EACH_PTR(tmp
);
778 new = alloc_range(min
, max
);
781 add_ptr_list(list
, new);
785 struct range_list
*clone_rl(struct range_list
*list
)
787 struct data_range
*tmp
;
788 struct range_list
*ret
= NULL
;
790 FOR_EACH_PTR(list
, tmp
) {
791 add_ptr_list(&ret
, tmp
);
792 } END_FOR_EACH_PTR(tmp
);
796 struct range_list
*clone_rl_permanent(struct range_list
*list
)
798 struct data_range
*tmp
;
799 struct data_range
*new;
800 struct range_list
*ret
= NULL
;
802 FOR_EACH_PTR(list
, tmp
) {
803 new = alloc_range_perm(tmp
->min
, tmp
->max
);
804 add_ptr_list(&ret
, new);
805 } END_FOR_EACH_PTR(tmp
);
809 struct range_list
*rl_union(struct range_list
*one
, struct range_list
*two
)
811 struct data_range
*tmp
;
812 struct range_list
*ret
= NULL
;
814 FOR_EACH_PTR(one
, tmp
) {
815 add_range(&ret
, tmp
->min
, tmp
->max
);
816 } END_FOR_EACH_PTR(tmp
);
817 FOR_EACH_PTR(two
, tmp
) {
818 add_range(&ret
, tmp
->min
, tmp
->max
);
819 } END_FOR_EACH_PTR(tmp
);
823 struct range_list
*remove_range(struct range_list
*list
, sval_t min
, sval_t max
)
825 struct data_range
*tmp
;
826 struct range_list
*ret
= NULL
;
831 min
= sval_cast(rl_type(list
), min
);
832 max
= sval_cast(rl_type(list
), max
);
833 if (sval_cmp(min
, max
) > 0) {
839 FOR_EACH_PTR(list
, tmp
) {
840 if (sval_cmp(tmp
->max
, min
) < 0) {
841 add_range(&ret
, tmp
->min
, tmp
->max
);
844 if (sval_cmp(tmp
->min
, max
) > 0) {
845 add_range(&ret
, tmp
->min
, tmp
->max
);
848 if (sval_cmp(tmp
->min
, min
) >= 0 && sval_cmp(tmp
->max
, max
) <= 0)
850 if (sval_cmp(tmp
->min
, min
) >= 0) {
852 add_range(&ret
, max
, tmp
->max
);
853 } else if (sval_cmp(tmp
->max
, max
) <= 0) {
855 add_range(&ret
, tmp
->min
, min
);
859 add_range(&ret
, tmp
->min
, min
);
860 add_range(&ret
, max
, tmp
->max
);
862 } END_FOR_EACH_PTR(tmp
);
866 int ranges_equiv(struct data_range
*one
, struct data_range
*two
)
872 if (sval_cmp(one
->min
, two
->min
) != 0)
874 if (sval_cmp(one
->max
, two
->max
) != 0)
/*
 * Return 1 if both lists contain exactly the same sequence of ranges.
 * Walks the two ptr_lists in lock step; any mismatch (including one list
 * being shorter) fails via ranges_equiv()'s NULL handling.
 */
int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		if (!one_range && !two_range)
			return 1;
		if (!ranges_equiv(one_range, two_range))
			return 0;
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);

	return 1;
}
903 int true_comparison_range(struct data_range
*left
, int comparison
, struct data_range
*right
)
905 switch (comparison
) {
907 case SPECIAL_UNSIGNED_LT
:
908 if (sval_cmp(left
->min
, right
->max
) < 0)
911 case SPECIAL_UNSIGNED_LTE
:
913 if (sval_cmp(left
->min
, right
->max
) <= 0)
917 if (sval_cmp(left
->max
, right
->min
) < 0)
919 if (sval_cmp(left
->min
, right
->max
) > 0)
922 case SPECIAL_UNSIGNED_GTE
:
924 if (sval_cmp(left
->max
, right
->min
) >= 0)
928 case SPECIAL_UNSIGNED_GT
:
929 if (sval_cmp(left
->max
, right
->min
) > 0)
932 case SPECIAL_NOTEQUAL
:
933 if (sval_cmp(left
->min
, left
->max
) != 0)
935 if (sval_cmp(right
->min
, right
->max
) != 0)
937 if (sval_cmp(left
->min
, right
->min
) != 0)
941 sm_msg("unhandled comparison %d\n", comparison
);
/*
 * Evaluate "can the comparison be true" where "left" says whether var is
 * the left-hand operand; swaps operands before delegating.
 */
int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}
955 static int false_comparison_range_sval(struct data_range
*left
, int comparison
, struct data_range
*right
)
957 switch (comparison
) {
959 case SPECIAL_UNSIGNED_LT
:
960 if (sval_cmp(left
->max
, right
->min
) >= 0)
963 case SPECIAL_UNSIGNED_LTE
:
965 if (sval_cmp(left
->max
, right
->min
) > 0)
969 if (sval_cmp(left
->min
, left
->max
) != 0)
971 if (sval_cmp(right
->min
, right
->max
) != 0)
973 if (sval_cmp(left
->min
, right
->min
) != 0)
976 case SPECIAL_UNSIGNED_GTE
:
978 if (sval_cmp(left
->min
, right
->max
) < 0)
982 case SPECIAL_UNSIGNED_GT
:
983 if (sval_cmp(left
->min
, right
->max
) <= 0)
986 case SPECIAL_NOTEQUAL
:
987 if (sval_cmp(left
->max
, right
->min
) < 0)
989 if (sval_cmp(left
->min
, right
->max
) > 0)
993 sm_msg("unhandled comparison %d\n", comparison
);
/*
 * Evaluate "can the comparison be false" where "left" says whether var is
 * the left-hand operand; swaps operands before delegating.
 */
int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}
1007 int possibly_true(struct expression
*left
, int comparison
, struct expression
*right
)
1009 struct range_list
*rl_left
, *rl_right
;
1010 struct data_range
*tmp_left
, *tmp_right
;
1011 struct symbol
*type
;
1013 if (!get_implied_rl(left
, &rl_left
))
1015 if (!get_implied_rl(right
, &rl_right
))
1018 type
= rl_type(rl_left
);
1019 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1020 type
= rl_type(rl_right
);
1021 if (type_positive_bits(type
) < 31)
1024 rl_left
= cast_rl(type
, rl_left
);
1025 rl_right
= cast_rl(type
, rl_right
);
1027 FOR_EACH_PTR(rl_left
, tmp_left
) {
1028 FOR_EACH_PTR(rl_right
, tmp_right
) {
1029 if (true_comparison_range(tmp_left
, comparison
, tmp_right
))
1031 } END_FOR_EACH_PTR(tmp_right
);
1032 } END_FOR_EACH_PTR(tmp_left
);
1036 int possibly_false(struct expression
*left
, int comparison
, struct expression
*right
)
1038 struct range_list
*rl_left
, *rl_right
;
1039 struct data_range
*tmp_left
, *tmp_right
;
1040 struct symbol
*type
;
1042 if (!get_implied_rl(left
, &rl_left
))
1044 if (!get_implied_rl(right
, &rl_right
))
1047 type
= rl_type(rl_left
);
1048 if (type_positive_bits(type
) < type_positive_bits(rl_type(rl_right
)))
1049 type
= rl_type(rl_right
);
1050 if (type_positive_bits(type
) < 31)
1053 rl_left
= cast_rl(type
, rl_left
);
1054 rl_right
= cast_rl(type
, rl_right
);
1056 FOR_EACH_PTR(rl_left
, tmp_left
) {
1057 FOR_EACH_PTR(rl_right
, tmp_right
) {
1058 if (false_comparison_range_sval(tmp_left
, comparison
, tmp_right
))
1060 } END_FOR_EACH_PTR(tmp_right
);
1061 } END_FOR_EACH_PTR(tmp_left
);
1065 int possibly_true_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1067 struct data_range
*left_tmp
, *right_tmp
;
1068 struct symbol
*type
;
1070 if (!left_ranges
|| !right_ranges
)
1073 type
= rl_type(left_ranges
);
1074 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1075 type
= rl_type(right_ranges
);
1076 if (type_positive_bits(type
) < 31)
1079 left_ranges
= cast_rl(type
, left_ranges
);
1080 right_ranges
= cast_rl(type
, right_ranges
);
1082 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1083 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1084 if (true_comparison_range(left_tmp
, comparison
, right_tmp
))
1086 } END_FOR_EACH_PTR(right_tmp
);
1087 } END_FOR_EACH_PTR(left_tmp
);
1091 int possibly_false_rl(struct range_list
*left_ranges
, int comparison
, struct range_list
*right_ranges
)
1093 struct data_range
*left_tmp
, *right_tmp
;
1094 struct symbol
*type
;
1096 if (!left_ranges
|| !right_ranges
)
1099 type
= rl_type(left_ranges
);
1100 if (type_positive_bits(type
) < type_positive_bits(rl_type(right_ranges
)))
1101 type
= rl_type(right_ranges
);
1102 if (type_positive_bits(type
) < 31)
1105 left_ranges
= cast_rl(type
, left_ranges
);
1106 right_ranges
= cast_rl(type
, right_ranges
);
1108 FOR_EACH_PTR(left_ranges
, left_tmp
) {
1109 FOR_EACH_PTR(right_ranges
, right_tmp
) {
1110 if (false_comparison_range_sval(left_tmp
, comparison
, right_tmp
))
1112 } END_FOR_EACH_PTR(right_tmp
);
1113 } END_FOR_EACH_PTR(left_tmp
);
1117 /* FIXME: the _rl here stands for right left so really it should be _lr */
/*
 * Left/right aware wrapper: "left" says whether a is the left-hand
 * operand; swaps operands before delegating to possibly_true_rl().
 */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}
/*
 * Left/right aware wrapper: "left" says whether a is the left-hand
 * operand; swaps operands before delegating to possibly_false_rl().
 */
int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}
1134 int rl_has_sval(struct range_list
*rl
, sval_t sval
)
1136 struct data_range
*tmp
;
1138 FOR_EACH_PTR(rl
, tmp
) {
1139 if (sval_cmp(tmp
->min
, sval
) <= 0 &&
1140 sval_cmp(tmp
->max
, sval
) >= 0)
1142 } END_FOR_EACH_PTR(tmp
);
/*
 * Append drange to the list as-is, without the merge/normalize logic
 * that add_range() performs.
 */
void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}
/* Push rl onto the range list stack. */
void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}
/* Pop and return the top entry of the range list stack. */
struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}
/* Peek at the top of the range list stack without popping it. */
struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}
/* Replace the stack top with the same list minus the "filter" ranges. */
void filter_top_rl(struct range_list_stack **rl_stack, struct range_list *filter)
{
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = rl_filter(rl, filter);
	push_rl(rl_stack, rl);
}
1182 struct range_list
*rl_truncate_cast(struct symbol
*type
, struct range_list
*rl
)
1184 struct data_range
*tmp
;
1185 struct range_list
*ret
= NULL
;
1191 if (!type
|| type
== rl_type(rl
))
1194 FOR_EACH_PTR(rl
, tmp
) {
1197 if (type_bits(type
) < type_bits(rl_type(rl
))) {
1198 min
.uvalue
= tmp
->min
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1199 max
.uvalue
= tmp
->max
.uvalue
& ((1ULL << type_bits(type
)) - 1);
1201 if (sval_cmp(min
, max
) > 0) {
1202 min
= sval_cast(type
, min
);
1203 max
= sval_cast(type
, max
);
1205 add_range_t(type
, &ret
, min
, max
);
1206 } END_FOR_EACH_PTR(tmp
);
1211 static int rl_is_sane(struct range_list
*rl
)
1213 struct data_range
*tmp
;
1214 struct symbol
*type
;
1217 FOR_EACH_PTR(rl
, tmp
) {
1218 if (!sval_fits(type
, tmp
->min
))
1220 if (!sval_fits(type
, tmp
->max
))
1222 if (sval_cmp(tmp
->min
, tmp
->max
) > 0)
1224 } END_FOR_EACH_PTR(tmp
);
1229 static int rl_type_consistent(struct range_list
*rl
)
1231 struct data_range
*tmp
;
1232 struct symbol
*type
;
1235 FOR_EACH_PTR(rl
, tmp
) {
1236 if (type
!= tmp
->min
.type
|| type
!= tmp
->max
.type
)
1238 } END_FOR_EACH_PTR(tmp
);
1242 static struct range_list
*cast_to_bool(struct range_list
*rl
)
1244 struct data_range
*tmp
;
1245 struct range_list
*ret
= NULL
;
1248 sval_t min
= { .type
= &bool_ctype
};
1249 sval_t max
= { .type
= &bool_ctype
};
1251 FOR_EACH_PTR(rl
, tmp
) {
1252 if (tmp
->min
.value
|| tmp
->max
.value
)
1254 if (sval_is_negative(tmp
->min
) &&
1255 sval_is_negative(tmp
->max
))
1257 if (tmp
->min
.value
== 0 ||
1258 tmp
->max
.value
== 0)
1260 if (sval_is_negative(tmp
->min
) &&
1263 } END_FOR_EACH_PTR(tmp
);
1270 add_range(&ret
, min
, max
);
1274 struct range_list
*cast_rl(struct symbol
*type
, struct range_list
*rl
)
1276 struct data_range
*tmp
;
1277 struct range_list
*ret
= NULL
;
1284 if (!rl_is_sane(rl
))
1285 return alloc_whole_rl(type
);
1286 if (type
== rl_type(rl
) && rl_type_consistent(rl
))
1289 if (type
== &bool_ctype
)
1290 return cast_to_bool(rl
);
1292 FOR_EACH_PTR(rl
, tmp
) {
1293 add_range_t(type
, &ret
, tmp
->min
, tmp
->max
);
1294 } END_FOR_EACH_PTR(tmp
);
1297 return alloc_whole_rl(type
);
1302 struct range_list
*rl_invert(struct range_list
*orig
)
1304 struct range_list
*ret
= NULL
;
1305 struct data_range
*tmp
;
1306 sval_t gap_min
, abs_max
, sval
;
1310 if (type_bits(rl_type(orig
)) < 0) /* void type mostly */
1313 gap_min
= sval_type_min(rl_min(orig
).type
);
1314 abs_max
= sval_type_max(rl_max(orig
).type
);
1316 FOR_EACH_PTR(orig
, tmp
) {
1317 if (sval_cmp(tmp
->min
, gap_min
) > 0) {
1318 sval
= sval_type_val(tmp
->min
.type
, tmp
->min
.value
- 1);
1319 add_range(&ret
, gap_min
, sval
);
1321 if (sval_cmp(tmp
->max
, abs_max
) == 0)
1323 gap_min
= sval_type_val(tmp
->max
.type
, tmp
->max
.value
+ 1);
1324 } END_FOR_EACH_PTR(tmp
);
1326 if (sval_cmp(gap_min
, abs_max
) <= 0)
1327 add_range(&ret
, gap_min
, abs_max
);
1332 struct range_list
*rl_filter(struct range_list
*rl
, struct range_list
*filter
)
1334 struct data_range
*tmp
;
1336 FOR_EACH_PTR(filter
, tmp
) {
1337 rl
= remove_range(rl
, tmp
->min
, tmp
->max
);
1338 } END_FOR_EACH_PTR(tmp
);
1343 struct range_list
*rl_intersection(struct range_list
*one
, struct range_list
*two
)
1345 struct range_list
*one_orig
;
1346 struct range_list
*two_orig
;
1347 struct range_list
*ret
;
1348 struct symbol
*ret_type
;
1349 struct symbol
*small_type
;
1350 struct symbol
*large_type
;
1360 ret_type
= rl_type(one
);
1361 small_type
= rl_type(one
);
1362 large_type
= rl_type(two
);
1364 if (type_bits(rl_type(two
)) < type_bits(small_type
)) {
1365 small_type
= rl_type(two
);
1366 large_type
= rl_type(one
);
1369 one
= cast_rl(large_type
, one
);
1370 two
= cast_rl(large_type
, two
);
1373 one
= rl_invert(one
);
1374 two
= rl_invert(two
);
1376 ret
= rl_filter(ret
, one
);
1377 ret
= rl_filter(ret
, two
);
1379 one
= cast_rl(small_type
, one_orig
);
1380 two
= cast_rl(small_type
, two_orig
);
1382 one
= rl_invert(one
);
1383 two
= rl_invert(two
);
1385 ret
= cast_rl(small_type
, ret
);
1386 ret
= rl_filter(ret
, one
);
1387 ret
= rl_filter(ret
, two
);
1389 return cast_rl(ret_type
, ret
);
1392 static struct range_list
*handle_mod_rl(struct range_list
*left
, struct range_list
*right
)
1397 max
= rl_max(right
);
1398 if (sval_is_max(max
))
1403 if (sval_is_negative(max
))
1405 if (sval_cmp(rl_max(left
), max
) < 0)
1409 return alloc_rl(zero
, max
);
1412 static struct range_list
*get_neg_rl(struct range_list
*rl
)
1414 struct data_range
*tmp
;
1415 struct data_range
*new;
1416 struct range_list
*ret
= NULL
;
1420 if (sval_is_positive(rl_min(rl
)))
1423 FOR_EACH_PTR(rl
, tmp
) {
1424 if (sval_is_positive(tmp
->min
))
1426 if (sval_is_positive(tmp
->max
)) {
1427 new = alloc_range(tmp
->min
, tmp
->max
);
1428 new->max
.value
= -1;
1429 add_range(&ret
, new->min
, new->max
);
1432 add_range(&ret
, tmp
->min
, tmp
->max
);
1433 } END_FOR_EACH_PTR(tmp
);
1438 static struct range_list
*get_pos_rl(struct range_list
*rl
)
1440 struct data_range
*tmp
;
1441 struct data_range
*new;
1442 struct range_list
*ret
= NULL
;
1446 if (sval_is_negative(rl_max(rl
)))
1449 FOR_EACH_PTR(rl
, tmp
) {
1450 if (sval_is_negative(tmp
->max
))
1452 if (sval_is_positive(tmp
->min
)) {
1453 add_range(&ret
, tmp
->min
, tmp
->max
);
1456 new = alloc_range(tmp
->min
, tmp
->max
);
1458 add_range(&ret
, new->min
, new->max
);
1459 } END_FOR_EACH_PTR(tmp
);
1464 static struct range_list
*divide_rl_helper(struct range_list
*left
, struct range_list
*right
)
1466 sval_t right_min
, right_max
;
1469 if (!left
|| !right
)
1472 /* let's assume we never divide by zero */
1473 right_min
= rl_min(right
);
1474 right_max
= rl_max(right
);
1475 if (right_min
.value
== 0 && right_max
.value
== 0)
1477 if (right_min
.value
== 0)
1478 right_min
.value
= 1;
1479 if (right_max
.value
== 0)
1480 right_max
.value
= -1;
1482 max
= sval_binop(rl_max(left
), '/', right_min
);
1483 min
= sval_binop(rl_min(left
), '/', right_max
);
1485 return alloc_rl(min
, max
);
1488 static struct range_list
*handle_divide_rl(struct range_list
*left
, struct range_list
*right
)
1490 struct range_list
*left_neg
, *left_pos
, *right_neg
, *right_pos
;
1491 struct range_list
*neg_neg
, *neg_pos
, *pos_neg
, *pos_pos
;
1492 struct range_list
*ret
;
1494 if (is_whole_rl(right
))
1497 left_neg
= get_neg_rl(left
);
1498 left_pos
= get_pos_rl(left
);
1499 right_neg
= get_neg_rl(right
);
1500 right_pos
= get_pos_rl(right
);
1502 neg_neg
= divide_rl_helper(left_neg
, right_neg
);
1503 neg_pos
= divide_rl_helper(left_neg
, right_pos
);
1504 pos_neg
= divide_rl_helper(left_pos
, right_neg
);
1505 pos_pos
= divide_rl_helper(left_pos
, right_pos
);
1507 ret
= rl_union(neg_neg
, neg_pos
);
1508 ret
= rl_union(ret
, pos_neg
);
1509 return rl_union(ret
, pos_pos
);
1512 static struct range_list
*handle_add_mult_rl(struct range_list
*left
, int op
, struct range_list
*right
)
1516 if (sval_binop_overflows(rl_min(left
), op
, rl_min(right
)))
1518 min
= sval_binop(rl_min(left
), op
, rl_min(right
));
1520 if (sval_binop_overflows(rl_max(left
), op
, rl_max(right
)))
1522 max
= sval_binop(rl_max(left
), op
, rl_max(right
));
1524 return alloc_rl(min
, max
);
1527 static struct range_list
*handle_sub_rl(struct range_list
*left_orig
, struct range_list
*right_orig
)
1529 struct range_list
*left_rl
, *right_rl
;
1530 struct symbol
*type
;
1532 sval_t min_ll
, max_ll
, res_ll
;
1535 /* TODO: These things should totally be using dranges where possible */
1537 if (!left_orig
|| !right_orig
)
1541 if (type_positive_bits(rl_type(left_orig
)) > type_positive_bits(type
))
1542 type
= rl_type(left_orig
);
1543 if (type_positive_bits(rl_type(right_orig
)) > type_positive_bits(type
))
1544 type
= rl_type(right_orig
);
1546 left_rl
= cast_rl(type
, left_orig
);
1547 right_rl
= cast_rl(type
, right_orig
);
1549 max
= rl_max(left_rl
);
1550 min
= sval_type_min(type
);
1552 min_ll
= rl_min(left_rl
);
1553 min_ll
.type
= &llong_ctype
;
1554 max_ll
= rl_max(right_rl
);
1555 max_ll
.type
= &llong_ctype
;
1557 res_ll
.value
= min_ll
.value
- max_ll
.value
;
1559 if (!sval_binop_overflows(rl_min(left_rl
), '-', rl_max(right_rl
))) {
1560 tmp
= sval_binop(rl_min(left_rl
), '-', rl_max(right_rl
));
1561 if (sval_cmp(tmp
, min
) > 0)
1563 } else if (type_positive_bits(type
) < 63 &&
1564 !sval_binop_overflows(min_ll
, '-', max_ll
) &&
1565 (min
.value
!= 0 && sval_cmp(res_ll
, min
) >= 0)) {
1566 struct range_list
*left_casted
, *right_casted
, *result
;
1568 left_casted
= cast_rl(&llong_ctype
, left_orig
);
1569 right_casted
= cast_rl(&llong_ctype
, right_orig
);
1570 result
= handle_sub_rl(left_casted
, right_casted
);
1571 return cast_rl(type
, result
);
1574 if (!sval_is_max(rl_max(left_rl
))) {
1575 tmp
= sval_binop(rl_max(left_rl
), '-', rl_min(right_rl
));
1576 if (sval_cmp(tmp
, max
) < 0)
1580 if (sval_is_min(min
) && sval_is_max(max
))
1583 return alloc_rl(min
, max
);
/*
 * Bit mask below the highest bit that is set in every value of the range:
 * derived from the range's minimum via sval_fls_mask().
 */
static unsigned long long rl_bits_always_set(struct range_list *rl)
{
	return sval_fls_mask(rl_min(rl));
}
/*
 * Bit mask covering every bit that could possibly be set in the range:
 * derived from the range's maximum via sval_fls_mask().
 */
static unsigned long long rl_bits_maybe_set(struct range_list *rl)
{
	return sval_fls_mask(rl_max(rl));
}
1596 static struct range_list
*handle_OR_rl(struct range_list
*left
, struct range_list
*right
)
1598 unsigned long long left_min
, left_max
, right_min
, right_max
;
1602 if ((rl_to_sval(left
, &sval
) || rl_to_sval(right
, &sval
)) &&
1603 !sval_binop_overflows(rl_max(left
), '+', rl_max(right
)))
1604 return rl_binop(left
, '+', right
);
1606 left_min
= rl_bits_always_set(left
);
1607 left_max
= rl_bits_maybe_set(left
);
1608 right_min
= rl_bits_always_set(right
);
1609 right_max
= rl_bits_maybe_set(right
);
1611 min
.type
= max
.type
= &ullong_ctype
;
1612 min
.uvalue
= left_min
| right_min
;
1613 max
.uvalue
= left_max
| right_max
;
1615 return cast_rl(rl_type(left
), alloc_rl(min
, max
));
1618 static struct range_list
*handle_XOR_rl(struct range_list
*left
, struct range_list
*right
)
1620 unsigned long long left_set
, left_maybe
;
1621 unsigned long long right_set
, right_maybe
;
1624 left_set
= rl_bits_always_set(left
);
1625 left_maybe
= rl_bits_maybe_set(left
);
1627 right_set
= rl_bits_always_set(right
);
1628 right_maybe
= rl_bits_maybe_set(right
);
1630 zero
= max
= rl_min(left
);
1632 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1634 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1637 static struct range_list
*handle_AND_rl(struct range_list
*left
, struct range_list
*right
)
1639 unsigned long long left_set
, left_maybe
;
1640 unsigned long long right_set
, right_maybe
;
1645 left_set
= rl_bits_always_set(left
);
1646 left_maybe
= rl_bits_maybe_set(left
);
1648 right_set
= rl_bits_always_set(right
);
1649 right_maybe
= rl_bits_maybe_set(right
);
1651 zero
= max
= rl_min(left
);
1653 max
.uvalue
= fls_mask((left_maybe
| right_maybe
) ^ (left_set
& right_set
));
1655 return cast_rl(rl_type(left
), alloc_rl(zero
, max
));
1658 struct range_list
*rl_binop(struct range_list
*left
, int op
, struct range_list
*right
)
1660 struct symbol
*cast_type
;
1661 sval_t left_sval
, right_sval
;
1662 struct range_list
*ret
= NULL
;
1664 cast_type
= rl_type(left
);
1665 if (sval_type_max(rl_type(left
)).uvalue
< sval_type_max(rl_type(right
)).uvalue
)
1666 cast_type
= rl_type(right
);
1667 if (sval_type_max(cast_type
).uvalue
< INT_MAX
)
1668 cast_type
= &int_ctype
;
1670 left
= cast_rl(cast_type
, left
);
1671 right
= cast_rl(cast_type
, right
);
1673 if (!left
&& !right
)
1676 if (rl_to_sval(left
, &left_sval
) && rl_to_sval(right
, &right_sval
)) {
1677 sval_t val
= sval_binop(left_sval
, op
, right_sval
);
1678 return alloc_rl(val
, val
);
1683 ret
= handle_mod_rl(left
, right
);
1686 ret
= handle_divide_rl(left
, right
);
1690 ret
= handle_add_mult_rl(left
, op
, right
);
1693 ret
= handle_OR_rl(left
, right
);
1696 ret
= handle_XOR_rl(left
, right
);
1699 ret
= handle_AND_rl(left
, right
);
1702 ret
= handle_sub_rl(left
, right
);
1704 /* FIXME: Do the rest as well */
1705 case SPECIAL_RIGHTSHIFT
:
1706 case SPECIAL_LEFTSHIFT
:
1713 void free_data_info_allocs(void)
1715 struct allocator_struct
*desc
= &data_info_allocator
;
1716 struct allocation_blob
*blob
= desc
->blobs
;
1722 desc
->allocations
= 0;
1723 desc
->total_bytes
= 0;
1724 desc
->useful_bytes
= 0;
1725 desc
->freelist
= NULL
;
1727 struct allocation_blob
*next
= blob
->next
;
1728 blob_free(blob
, desc
->chunking
);
1731 clear_data_range_alloc();
1734 void split_comparison_rl(struct range_list
*left_orig
, int op
, struct range_list
*right_orig
,
1735 struct range_list
**left_true_rl
, struct range_list
**left_false_rl
,
1736 struct range_list
**right_true_rl
, struct range_list
**right_false_rl
)
1738 struct range_list
*left_true
, *left_false
;
1739 struct range_list
*right_true
, *right_false
;
1742 min
= sval_type_min(rl_type(left_orig
));
1743 max
= sval_type_max(rl_type(left_orig
));
1745 left_true
= clone_rl(left_orig
);
1746 left_false
= clone_rl(left_orig
);
1747 right_true
= clone_rl(right_orig
);
1748 right_false
= clone_rl(right_orig
);
1752 case SPECIAL_UNSIGNED_LT
:
1753 left_true
= remove_range(left_orig
, rl_max(right_orig
), max
);
1754 if (!sval_is_min(rl_min(right_orig
))) {
1755 left_false
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
1758 right_true
= remove_range(right_orig
, min
, rl_min(left_orig
));
1759 if (!sval_is_max(rl_max(left_orig
)))
1760 right_false
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
1762 case SPECIAL_UNSIGNED_LTE
:
1764 if (!sval_is_max(rl_max(right_orig
)))
1765 left_true
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
1766 left_false
= remove_range(left_orig
, min
, rl_min(right_orig
));
1768 if (!sval_is_min(rl_min(left_orig
)))
1769 right_true
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
1770 right_false
= remove_range(right_orig
, rl_max(left_orig
), max
);
1772 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
1773 left_false
= remove_range(left_false
, rl_min(left_orig
), rl_min(left_orig
));
1774 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
1775 right_false
= remove_range(right_false
, rl_max(left_orig
), rl_max(left_orig
));
1778 if (!sval_is_max(rl_max(right_orig
))) {
1779 left_true
= remove_range(left_true
, add_one(rl_max(right_orig
)), max
);
1781 if (!sval_is_min(rl_min(right_orig
))) {
1782 left_true
= remove_range(left_true
, min
, sub_one(rl_min(right_orig
)));
1784 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
1785 left_false
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
1787 if (!sval_is_max(rl_max(left_orig
)))
1788 right_true
= remove_range(right_true
, add_one(rl_max(left_orig
)), max
);
1789 if (!sval_is_min(rl_min(left_orig
)))
1790 right_true
= remove_range(right_true
, min
, sub_one(rl_min(left_orig
)));
1791 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
1792 right_false
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
1794 case SPECIAL_UNSIGNED_GTE
:
1796 if (!sval_is_min(rl_min(right_orig
)))
1797 left_true
= remove_range(left_orig
, min
, sub_one(rl_min(right_orig
)));
1798 left_false
= remove_range(left_orig
, rl_max(right_orig
), max
);
1800 if (!sval_is_max(rl_max(left_orig
)))
1801 right_true
= remove_range(right_orig
, add_one(rl_max(left_orig
)), max
);
1802 right_false
= remove_range(right_orig
, min
, rl_min(left_orig
));
1804 if (sval_cmp(rl_min(left_orig
), rl_min(right_orig
)) == 0)
1805 right_false
= remove_range(right_false
, rl_min(left_orig
), rl_min(left_orig
));
1806 if (sval_cmp(rl_max(left_orig
), rl_max(right_orig
)) == 0)
1807 left_false
= remove_range(left_false
, rl_max(left_orig
), rl_max(left_orig
));
1810 case SPECIAL_UNSIGNED_GT
:
1811 left_true
= remove_range(left_orig
, min
, rl_min(right_orig
));
1812 if (!sval_is_max(rl_max(right_orig
)))
1813 left_false
= remove_range(left_orig
, add_one(rl_max(right_orig
)), max
);
1815 right_true
= remove_range(right_orig
, rl_max(left_orig
), max
);
1816 if (!sval_is_min(rl_min(left_orig
)))
1817 right_false
= remove_range(right_orig
, min
, sub_one(rl_min(left_orig
)));
1819 case SPECIAL_NOTEQUAL
:
1820 if (!sval_is_max(rl_max(right_orig
)))
1821 left_false
= remove_range(left_false
, add_one(rl_max(right_orig
)), max
);
1822 if (!sval_is_min(rl_min(right_orig
)))
1823 left_false
= remove_range(left_false
, min
, sub_one(rl_min(right_orig
)));
1824 if (sval_cmp(rl_min(right_orig
), rl_max(right_orig
)) == 0)
1825 left_true
= remove_range(left_orig
, rl_min(right_orig
), rl_min(right_orig
));
1827 if (!sval_is_max(rl_max(left_orig
)))
1828 right_false
= remove_range(right_false
, add_one(rl_max(left_orig
)), max
);
1829 if (!sval_is_min(rl_min(left_orig
)))
1830 right_false
= remove_range(right_false
, min
, sub_one(rl_min(left_orig
)));
1831 if (sval_cmp(rl_min(left_orig
), rl_max(left_orig
)) == 0)
1832 right_true
= remove_range(right_orig
, rl_min(left_orig
), rl_min(left_orig
));
1835 sm_msg("internal error: unhandled comparison %d", op
);
1840 *left_true_rl
= left_true
;
1841 *left_false_rl
= left_false
;
1843 if (right_true_rl
) {
1844 *right_true_rl
= right_true
;
1845 *right_false_rl
= right_false
;