/*
 * Copyright (C) 2009 Dan Carpenter.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
 */

#include "smatch.h"
#include "smatch_extra.h"
#include "smatch_slist.h"

ALLOCATOR(data_info, "smatch extra data");
ALLOCATOR(data_range, "data range");
__DO_ALLOCATOR(struct data_range, sizeof(struct data_range), __alignof__(struct data_range),
	       "permanent ranges", perm_data_range);

char *show_rl(struct range_list *list)
{
	struct data_range *tmp;
	char full[256];
	int i = 0;

	full[0] = '\0';
	FOR_EACH_PTR(list, tmp) {
		if (i++)
			strncat(full, ",", 254 - strlen(full));
		if (sval_cmp(tmp->min, tmp->max) == 0) {
			strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
			continue;
		}
		strncat(full, sval_to_str(tmp->min), 254 - strlen(full));
		strncat(full, "-", 254 - strlen(full));
		strncat(full, sval_to_str(tmp->max), 254 - strlen(full));
	} END_FOR_EACH_PTR(tmp);

	return alloc_sname(full);
}

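/*
 * Illustrative sketch, not part of the original file: builds a small range
 * list and prints it with show_rl().  The exact text depends on
 * sval_to_str(), but for an int list like this one would expect something
 * like "1-10,100".  The *_example() helpers in this file are unused
 * illustrations only.
 */
static void show_rl_example(void)
{
	struct range_list *rl = NULL;

	add_range(&rl, sval_type_val(&int_ctype, 1), sval_type_val(&int_ctype, 10));
	add_range(&rl, sval_type_val(&int_ctype, 100), sval_type_val(&int_ctype, 100));
	sm_msg("example ranges: %s", show_rl(rl));
}
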
static int sval_too_big(struct symbol *type, sval_t sval)
{
	if (type_bits(type) == 64)
		return 0;
	if (sval.uvalue > ((1ULL << type_bits(type)) - 1))
		return 1;
	return 0;
}

static void add_range_t(struct symbol *type, struct range_list **rl, sval_t min, sval_t max)
{
	/* If we're just adding a number, cast it and add it */
	if (sval_cmp(min, max) == 0) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));
		return;
	}

	/* If the range is within the type range then add it */
	if (sval_fits(type, min) && sval_fits(type, max)) {
		add_range(rl, sval_cast(type, min), sval_cast(type, max));
		return;
	}

	/*
	 * If the range we are adding has more bits than the range type then
	 * add the whole range type.  Eg:
	 * 0x8000000000000000 - 0xf000000000000000 -> cast to int
	 * This isn't totally the right thing to do.  We could be more granular.
	 */
	if (sval_too_big(type, min) || sval_too_big(type, max)) {
		add_range(rl, sval_type_min(type), sval_type_max(type));
		return;
	}

	/* Cast negative values to high positive values */
	if (sval_is_negative(min) && type_unsigned(type)) {
		if (sval_is_positive(max)) {
			if (sval_too_high(type, max)) {
				add_range(rl, sval_type_min(type), sval_type_max(type));
				return;
			}
			add_range(rl, sval_type_val(type, 0), sval_cast(type, max));
			max = sval_type_max(type);
		} else {
			max = sval_cast(type, max);
		}
		min = sval_cast(type, min);
		add_range(rl, min, max);
		return;
	}

	/* Cast high positive numbers to negative */
	if (sval_unsigned(max) && sval_is_negative(sval_cast(type, max))) {
		if (!sval_is_negative(sval_cast(type, min))) {
			add_range(rl, sval_cast(type, min), sval_type_max(type));
			min = sval_type_min(type);
		} else {
			min = sval_cast(type, min);
		}
		max = sval_cast(type, max);
		add_range(rl, min, max);
		return;
	}

	add_range(rl, sval_cast(type, min), sval_cast(type, max));
}

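/*
 * Illustrative sketch, not part of the original file: shows the intent of
 * add_range_t().  A span that fits the target type (1-10) is simply cast and
 * added, while the s64 span 0x8000000000000000-0xf000000000000000 from the
 * comment above cannot be represented in an int, so the whole int range is
 * added instead.
 */
static void add_range_t_example(void)
{
	struct range_list *rl = NULL;

	add_range_t(&int_ctype, &rl, sval_type_val(&llong_ctype, 1),
		    sval_type_val(&llong_ctype, 10));
	add_range_t(&int_ctype, &rl, sval_type_val(&ullong_ctype, 0x8000000000000000ULL),
		    sval_type_val(&ullong_ctype, 0xf000000000000000ULL));
	sm_msg("after add_range_t: %s", show_rl(rl));
}
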
static int str_to_comparison_arg_helper(const char *str,
		struct expression *call, int *comparison,
		struct expression **arg, char **endp)
{
	int param;
	char *c = (char *)str;

	if (*c != '[')
		return 0;
	c++;

	if (*c == '<') {
		c++;
		if (*c == '=') {
			*comparison = SPECIAL_LTE;
			c++;
		} else {
			*comparison = '<';
		}
	} else if (*c == '=') {
		c += 2;
		*comparison = SPECIAL_EQUAL;
	} else if (*c == '>') {
		c++;
		if (*c == '=') {
			*comparison = SPECIAL_GTE;
			c++;
		} else {
			*comparison = '>';
		}
	} else if (*c == '!') {
		c += 2;
		*comparison = SPECIAL_NOTEQUAL;
	} else {
		return 0;
	}

	if (*c != '$')
		return 0;
	c++;

	param = strtoll(c, &c, 10);
	c++; /* skip the ']' character */
	if (endp)
		*endp = (char *)c;

	*arg = get_argument_from_call_expr(call->args, param);
	if (!*arg)
		return 0;
	return 1;
}

int str_to_comparison_arg(const char *str, struct expression *call, int *comparison, struct expression **arg)
{
	while (*str != '\0') {
		if (*str == '[')
			break;
		str++;
	}
	if (*str == '\0')
		return 0;

	return str_to_comparison_arg_helper(str, call, comparison, arg, NULL);
}

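/*
 * Illustrative sketch, not part of the original file: the comparison strings
 * parsed above look like "[<=$1]", meaning "bounded by argument 1 of the
 * call".  A hypothetical caller would use it roughly like this, assuming
 * "call" is a call expression with at least two arguments.
 */
static void str_to_comparison_arg_example(struct expression *call)
{
	struct expression *arg;
	int comparison;

	if (str_to_comparison_arg("[<=$1]", call, &comparison, &arg))
		sm_msg("limit is argument 1 (comparison %s)", show_special(comparison));
}
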
static int get_val_from_key(int use_max, struct symbol *type, char *c, struct expression *call, char **endp, sval_t *sval)
{
	struct expression *arg;
	int comparison;
	sval_t ret, tmp;

	if (use_max)
		ret = sval_type_max(type);
	else
		ret = sval_type_min(type);

	if (!str_to_comparison_arg_helper(c, call, &comparison, &arg, endp)) {
		*sval = ret;
		return 1;
	}

	if (use_max && get_implied_max(arg, &tmp)) {
		ret = tmp;
		if (comparison == '<') {
			tmp.value = 1;
			ret = sval_binop(ret, '-', tmp);
		}
	}
	if (!use_max && get_implied_min(arg, &tmp)) {
		ret = tmp;
		if (comparison == '>') {
			tmp.value = 1;
			ret = sval_binop(ret, '+', tmp);
		}
	}

	*sval = ret;
	return 1;
}

static sval_t add_one(sval_t sval)
{
	sval.value++;
	return sval;
}

static sval_t sub_one(sval_t sval)
{
	sval.value--;
	return sval;
}

void filter_by_comparison(struct range_list **rl, int comparison, struct range_list *right)
{
	struct range_list *left_orig = *rl;
	struct range_list *right_orig = right;
	struct range_list *ret_rl = *rl;
	struct symbol *cast_type;
	sval_t min, max;

	cast_type = rl_type(left_orig);
	if (sval_type_max(rl_type(left_orig)).uvalue < sval_type_max(rl_type(right_orig)).uvalue)
		cast_type = rl_type(right_orig);
	if (sval_type_max(cast_type).uvalue < INT_MAX)
		cast_type = &int_ctype;

	min = sval_type_min(cast_type);
	max = sval_type_max(cast_type);
	left_orig = cast_rl(cast_type, left_orig);
	right_orig = cast_rl(cast_type, right_orig);

	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		ret_rl = remove_range(left_orig, rl_max(right_orig), max);
		break;
	case SPECIAL_LTE:
	case SPECIAL_UNSIGNED_LTE:
		if (!sval_is_max(rl_max(right_orig)))
			ret_rl = remove_range(left_orig, add_one(rl_max(right_orig)), max);
		break;
	case SPECIAL_EQUAL:
		if (!sval_is_max(rl_max(right_orig)))
			ret_rl = remove_range(ret_rl, add_one(rl_max(right_orig)), max);
		if (!sval_is_min(rl_min(right_orig)))
			ret_rl = remove_range(ret_rl, min, sub_one(rl_min(right_orig)));
		break;
	case SPECIAL_GTE:
	case SPECIAL_UNSIGNED_GTE:
		if (!sval_is_min(rl_min(right_orig)))
			ret_rl = remove_range(left_orig, min, sub_one(rl_min(right_orig)));
		break;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		ret_rl = remove_range(left_orig, min, rl_min(right_orig));
		break;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(rl_min(right_orig), rl_max(right_orig)) == 0)
			ret_rl = remove_range(left_orig, rl_min(right_orig), rl_min(right_orig));
		break;
	default:
		sm_msg("internal error: unhandled comparison %s", show_special(comparison));
		return;
	}

	*rl = cast_rl(rl_type(*rl), ret_rl);
}

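/*
 * Illustrative sketch, not part of the original file: if *rl is 0-100 and we
 * learn that the value is <= a right hand side of 0-10, then
 * filter_by_comparison() trims *rl down to 0-10.
 */
static void filter_by_comparison_example(void)
{
	struct range_list *rl = NULL;
	struct range_list *right = NULL;

	add_range(&rl, sval_type_val(&int_ctype, 0), sval_type_val(&int_ctype, 100));
	add_range(&right, sval_type_val(&int_ctype, 0), sval_type_val(&int_ctype, 10));
	filter_by_comparison(&rl, SPECIAL_LTE, right);
	sm_msg("filtered: %s", show_rl(rl));	/* expected to be "0-10" */
}
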
static struct range_list *filter_by_comparison_call(char *c, struct expression *call, char **endp, struct range_list *start_rl)
{
	struct expression *arg;
	struct range_list *right_orig;
	int comparison;

	if (!str_to_comparison_arg_helper(c, call, &comparison, &arg, endp))
		return start_rl;

	if (!get_implied_rl(arg, &right_orig))
		return start_rl;

	if (rl_type(start_rl) == &int_ctype &&
	    sval_is_negative(rl_min(start_rl)) &&
	    type_unsigned(rl_type(right_orig)))
		right_orig = cast_rl(&int_ctype, right_orig);

	filter_by_comparison(&start_rl, comparison, right_orig);
	return start_rl;
}

static sval_t parse_val(int use_max, struct expression *call, struct symbol *type, char *c, char **endp)
{
	char *start = c;
	sval_t ret;

	if (!strncmp(start, "max", 3)) {
		ret = sval_type_max(type);
		c += 3;
	} else if (!strncmp(start, "u64max", 6)) {
		ret = sval_type_val(type, ULLONG_MAX);
		c += 6;
	} else if (!strncmp(start, "s64max", 6)) {
		ret = sval_type_val(type, LLONG_MAX);
		c += 6;
	} else if (!strncmp(start, "u32max", 6)) {
		ret = sval_type_val(type, UINT_MAX);
		c += 6;
	} else if (!strncmp(start, "s32max", 6)) {
		ret = sval_type_val(type, INT_MAX);
		c += 6;
	} else if (!strncmp(start, "u16max", 6)) {
		ret = sval_type_val(type, USHRT_MAX);
		c += 6;
	} else if (!strncmp(start, "s16max", 6)) {
		ret = sval_type_val(type, SHRT_MAX);
		c += 6;
	} else if (!strncmp(start, "min", 3)) {
		ret = sval_type_min(type);
		c += 3;
	} else if (!strncmp(start, "s64min", 6)) {
		ret = sval_type_val(type, LLONG_MIN);
		c += 6;
	} else if (!strncmp(start, "s32min", 6)) {
		ret = sval_type_val(type, INT_MIN);
		c += 6;
	} else if (!strncmp(start, "s16min", 6)) {
		ret = sval_type_val(type, SHRT_MIN);
		c += 6;
	} else if (!strncmp(start, "long_min", 8)) {
		ret = sval_type_val(type, LONG_MIN);
		c += 8;
	} else if (!strncmp(start, "long_max", 8)) {
		ret = sval_type_val(type, LONG_MAX);
		c += 8;
	} else if (!strncmp(start, "ulong_max", 9)) {
		ret = sval_type_val(type, ULONG_MAX);
		c += 9;
	} else if (!strncmp(start, "ptr_max", 7)) {
		ret = sval_type_val(type, valid_ptr_max);
		c += 7;
	} else if (start[0] == '[') {
		/* this parses [==p0] comparisons */
		get_val_from_key(1, type, start, call, &c, &ret);
	} else if (type_positive_bits(type) == 64) {
		ret = sval_type_val(type, strtoull(start, &c, 10));
	} else {
		ret = sval_type_val(type, strtoll(start, &c, 10));
	}
	*endp = c;
	return ret;
}

static char *jump_to_call_math(char *value)
{
	char *c = value;

	while (*c && *c != '[')
		c++;

	if (!*c)
		return NULL;
	c++;
	if (*c == '<' || *c == '=' || *c == '>' || *c == '!')
		return NULL;

	return c;
}

static void str_to_rl_helper(struct expression *call, struct symbol *type, char *str, char **endp, struct range_list **rl)
{
	struct range_list *rl_tmp = NULL;
	char *c;
	sval_t min, max;

	min = sval_type_min(type);
	max = sval_type_max(type);

	c = str;
	while (*c != '\0' && *c != '[') {
		min = parse_val(0, call, type, c, &c);
		if (*c == '\0' || *c == '[') {
			add_range_t(type, &rl_tmp, min, min);
			break;
		}
		if (*c == ',') {
			add_range_t(type, &rl_tmp, min, min);
			c++;
			continue;
		}
		if (*c != '-') {
			sm_msg("debug XXX: trouble parsing %s c = %s", str, c);
			break;
		}
		c++;
		max = parse_val(1, call, type, c, &c);
		add_range_t(type, &rl_tmp, min, max);
		if (*c == ',')
			c++;
	}

	*rl = rl_tmp;
	*endp = c;
}

static void str_to_dinfo(struct expression *call, struct symbol *type, char *value, struct data_info *dinfo)
{
	struct range_list *math_rl;
	char *call_math;
	char *c;
	struct range_list *rl = NULL;

	if (strcmp(value, "empty") == 0)
		return;

	if (strncmp(value, "[==$", 4) == 0) {
		struct expression *arg;
		int comparison;

		if (!str_to_comparison_arg(value, call, &comparison, &arg))
			return;
		if (!get_implied_rl(arg, &rl))
			return;
		goto cast;
	}

	str_to_rl_helper(call, type, value, &c, &rl);
	if (*c == '\0')
		goto cast;

	call_math = jump_to_call_math(value);
	if (call_math && parse_call_math_rl(call, call_math, &math_rl)) {
		rl = rl_intersection(rl, math_rl);
		goto cast;
	}

	/*
	 * For now if we already tried to handle the call math and couldn't
	 * figure it out then bail.
	 */
	if (jump_to_call_math(c) == c + 1)
		goto cast;

	rl = filter_by_comparison_call(c, call, &c, rl);

cast:
	rl = cast_rl(type, rl);
	dinfo->value_ranges = rl;
}

void str_to_rl(struct symbol *type, char *value, struct range_list **rl)
{
	struct data_info dinfo = {};

	str_to_dinfo(NULL, type, value, &dinfo);
	*rl = dinfo.value_ranges;
}

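/*
 * Illustrative sketch, not part of the original file: shows the text format
 * str_to_rl() accepts.  Single values and min-max spans are separated by
 * commas, and the named limits from parse_val() ("s32max", "u64max", ...)
 * can stand in for numbers.
 */
static void str_to_rl_example(void)
{
	struct range_list *rl;

	str_to_rl(&int_ctype, (char *)"-5,0-10,100-s32max", &rl);
	sm_msg("parsed: %s", show_rl(rl));
}
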
void call_results_to_rl(struct expression *expr, struct symbol *type, char *value, struct range_list **rl)
{
	struct data_info dinfo = {};

	str_to_dinfo(strip_expr(expr), type, value, &dinfo);
	*rl = dinfo.value_ranges;
}

int is_whole_rl(struct range_list *rl)
{
	struct data_range *drange;

	if (ptr_list_empty(rl))
		return 0;
	drange = first_ptr_list((struct ptr_list *)rl);
	if (sval_is_min(drange->min) && sval_is_max(drange->max))
		return 1;
	return 0;
}

int is_whole_rl_non_zero(struct range_list *rl)
{
	struct data_range *drange;

	if (ptr_list_empty(rl))
		return 0;
	drange = first_ptr_list((struct ptr_list *)rl);
	if (sval_unsigned(drange->min) &&
	    drange->min.value == 1 &&
	    sval_is_max(drange->max))
		return 1;
	if (!sval_is_min(drange->min) || drange->max.value != -1)
		return 0;
	drange = last_ptr_list((struct ptr_list *)rl);
	if (drange->min.value != 1 || !sval_is_max(drange->max))
		return 0;
	return 1;
}

sval_t rl_min(struct range_list *rl)
{
	struct data_range *drange;
	sval_t ret;

	ret.type = &llong_ctype;
	ret.value = LLONG_MIN;
	if (ptr_list_empty(rl))
		return ret;
	drange = first_ptr_list((struct ptr_list *)rl);
	return drange->min;
}

sval_t rl_max(struct range_list *rl)
{
	struct data_range *drange;
	sval_t ret;

	ret.type = &llong_ctype;
	ret.value = LLONG_MAX;
	if (ptr_list_empty(rl))
		return ret;
	drange = last_ptr_list((struct ptr_list *)rl);
	return drange->max;
}

int rl_to_sval(struct range_list *rl, sval_t *sval)
{
	sval_t min, max;

	if (!rl)
		return 0;

	min = rl_min(rl);
	max = rl_max(rl);
	if (sval_cmp(min, max) != 0)
		return 0;
	*sval = min;
	return 1;
}

struct symbol *rl_type(struct range_list *rl)
{
	if (!rl)
		return NULL;
	return rl_min(rl).type;
}

static struct data_range *alloc_range_helper_sval(sval_t min, sval_t max, int perm)
{
	struct data_range *ret;

	if (perm)
		ret = __alloc_perm_data_range(0);
	else
		ret = __alloc_data_range(0);
	ret->min = min;
	ret->max = max;
	return ret;
}

struct data_range *alloc_range(sval_t min, sval_t max)
{
	return alloc_range_helper_sval(min, max, 0);
}

struct data_range *alloc_range_perm(sval_t min, sval_t max)
{
	return alloc_range_helper_sval(min, max, 1);
}

struct range_list *alloc_rl(sval_t min, sval_t max)
{
	struct range_list *rl = NULL;

	if (sval_cmp(min, max) > 0)
		return alloc_whole_rl(min.type);

	add_range(&rl, min, max);
	return rl;
}

struct range_list *alloc_whole_rl(struct symbol *type)
{
	if (!type || type_positive_bits(type) < 0)
		type = &llong_ctype;
	if (type->type == SYM_ARRAY)
		type = &ptr_ctype;

	return alloc_rl(sval_type_min(type), sval_type_max(type));
}

void add_range(struct range_list **list, sval_t min, sval_t max)
{
	struct data_range *tmp;
	struct data_range *new = NULL;
	int check_next = 0;

	/*
	 * There is at least one valid reason why the types might be confusing
	 * and that's when you have a void pointer and on some paths you treat
	 * it as a u8 pointer and on other paths you treat it as a u16 pointer.
	 * This case is hard to deal with.
	 *
	 * There are other cases where we probably should be more specific about
	 * the types than we are.  For example, we end up merging a lot of ulong
	 * with pointers and I have not figured out why we do that.
	 *
	 * But this hack works for both cases, I think.  We cast it to pointers
	 * or we use the bigger size.
	 */
	if (*list && rl_type(*list) != min.type) {
		if (rl_type(*list)->type == SYM_PTR) {
			min = sval_cast(rl_type(*list), min);
			max = sval_cast(rl_type(*list), max);
		} else if (min.type->type == SYM_PTR) {
			*list = cast_rl(min.type, *list);
		} else if (type_bits(rl_type(*list)) >= type_bits(min.type)) {
			min = sval_cast(rl_type(*list), min);
			max = sval_cast(rl_type(*list), max);
		} else {
			*list = cast_rl(min.type, *list);
		}
	}

	if (sval_cmp(min, max) > 0) {
		min = sval_type_min(min.type);
		max = sval_type_max(min.type);
	}

	/*
	 * FIXME:  This has a problem merging a range_list like: min-0,3-max
	 * with a range like 1-2.  You end up with min-2,3-max instead of
	 * just min-max.
	 */
	FOR_EACH_PTR(*list, tmp) {
		if (check_next) {
			/* Sometimes we overlap with more than one range
			   so we have to delete or modify the next range. */
			if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
				/* join 2 ranges here */
				new->max = tmp->max;
				DELETE_CURRENT_PTR(tmp);
				return;
			}

			/* Doesn't overlap with the next one. */
			if (sval_cmp(max, tmp->min) < 0)
				return;

			if (sval_cmp(max, tmp->max) <= 0) {
				/* Partially overlaps the next one. */
				new->max = tmp->max;
				DELETE_CURRENT_PTR(tmp);
				return;
			}

			/* Completely overlaps the next one. */
			DELETE_CURRENT_PTR(tmp);
			/* there could be more ranges to delete */
			continue;
		}
		if (!sval_is_max(max) && max.value + 1 == tmp->min.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(min, tmp->max);
			REPLACE_CURRENT_PTR(tmp, new);
			return;
		}
		if (sval_cmp(max, tmp->min) < 0) {  /* new range entirely below */
			new = alloc_range(min, max);
			INSERT_CURRENT(new, tmp);
			return;
		}
		if (sval_cmp(min, tmp->min) < 0) {  /* new range partially below */
			if (sval_cmp(max, tmp->max) < 0)
				max = tmp->max;
			else
				check_next = 1;
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			if (!check_next)
				return;
			continue;
		}
		if (sval_cmp(max, tmp->max) <= 0)  /* new range already included */
			return;
		if (sval_cmp(min, tmp->max) <= 0) {  /* new range partially above */
			min = tmp->min;
			new = alloc_range(min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			check_next = 1;
			continue;
		}
		if (!sval_is_min(min) && min.value - 1 == tmp->max.value) {
			/* join 2 ranges into a big range */
			new = alloc_range(tmp->min, max);
			REPLACE_CURRENT_PTR(tmp, new);
			check_next = 1;
			continue;
		}
		/* the new range is entirely above the existing ranges */
	} END_FOR_EACH_PTR(tmp);

	if (check_next)
		return;
	new = alloc_range(min, max);
	add_ptr_list(list, new);
}

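/*
 * Illustrative sketch, not part of the original file: add_range() keeps the
 * list sorted and merges adjacent or overlapping spans, so adding 1-5 and
 * then 6-10 to an int list leaves a single 1-10 entry.  The FIXME above
 * documents the one known case where two existing entries are not re-joined.
 */
static void add_range_example(void)
{
	struct range_list *rl = NULL;

	add_range(&rl, sval_type_val(&int_ctype, 1), sval_type_val(&int_ctype, 5));
	add_range(&rl, sval_type_val(&int_ctype, 6), sval_type_val(&int_ctype, 10));
	sm_msg("merged: %s", show_rl(rl));	/* expected to be "1-10" */
}
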
struct range_list *clone_rl(struct range_list *list)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		add_ptr_list(&ret, tmp);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

struct range_list *clone_rl_permanent(struct range_list *list)
{
	struct data_range *tmp;
	struct data_range *new;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(list, tmp) {
		new = alloc_range_perm(tmp->min, tmp->max);
		add_ptr_list(&ret, new);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

struct range_list *rl_union(struct range_list *one, struct range_list *two)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	FOR_EACH_PTR(one, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
	FOR_EACH_PTR(two, tmp) {
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

struct range_list *remove_range(struct range_list *list, sval_t min, sval_t max)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	if (!list)
		return NULL;

	min = sval_cast(rl_type(list), min);
	max = sval_cast(rl_type(list), max);
	if (sval_cmp(min, max) > 0) {
		sval_t swap = min;

		min = max;
		max = swap;
	}

	FOR_EACH_PTR(list, tmp) {
		if (sval_cmp(tmp->max, min) < 0) {
			add_range(&ret, tmp->min, tmp->max);
			continue;
		}
		if (sval_cmp(tmp->min, max) > 0) {
			add_range(&ret, tmp->min, tmp->max);
			continue;
		}
		if (sval_cmp(tmp->min, min) >= 0 && sval_cmp(tmp->max, max) <= 0)
			continue;
		if (sval_cmp(tmp->min, min) >= 0) {
			max = add_one(max);
			add_range(&ret, max, tmp->max);
		} else if (sval_cmp(tmp->max, max) <= 0) {
			min = sub_one(min);
			add_range(&ret, tmp->min, min);
		} else {
			min = sub_one(min);
			max = add_one(max);
			add_range(&ret, tmp->min, min);
			add_range(&ret, max, tmp->max);
		}
	} END_FOR_EACH_PTR(tmp);
	return ret;
}

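/*
 * Illustrative sketch, not part of the original file: removing 4-6 from a
 * 1-10 list splits it into the two spans around the hole.
 */
static void remove_range_example(void)
{
	struct range_list *rl = NULL;

	add_range(&rl, sval_type_val(&int_ctype, 1), sval_type_val(&int_ctype, 10));
	rl = remove_range(rl, sval_type_val(&int_ctype, 4), sval_type_val(&int_ctype, 6));
	sm_msg("with hole: %s", show_rl(rl));	/* expected to be "1-3,7-10" */
}
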
int ranges_equiv(struct data_range *one, struct data_range *two)
{
	if (!one && !two)
		return 1;
	if (!one || !two)
		return 0;
	if (sval_cmp(one->min, two->min) != 0)
		return 0;
	if (sval_cmp(one->max, two->max) != 0)
		return 0;
	return 1;
}

int rl_equiv(struct range_list *one, struct range_list *two)
{
	struct data_range *one_range;
	struct data_range *two_range;

	if (one == two)
		return 1;

	PREPARE_PTR_LIST(one, one_range);
	PREPARE_PTR_LIST(two, two_range);
	for (;;) {
		if (!one_range && !two_range)
			return 1;
		if (!ranges_equiv(one_range, two_range))
			return 0;
		NEXT_PTR_LIST(one_range);
		NEXT_PTR_LIST(two_range);
	}
	FINISH_PTR_LIST(two_range);
	FINISH_PTR_LIST(one_range);

	return 1;
}

int true_comparison_range(struct data_range *left, int comparison, struct data_range *right)
{
	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->min, right->max) < 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_LTE:
	case SPECIAL_LTE:
		if (sval_cmp(left->min, right->max) <= 0)
			return 1;
		return 0;
	case SPECIAL_EQUAL:
		if (sval_cmp(left->max, right->min) < 0)
			return 0;
		if (sval_cmp(left->min, right->max) > 0)
			return 0;
		return 1;
	case SPECIAL_UNSIGNED_GTE:
	case SPECIAL_GTE:
		if (sval_cmp(left->max, right->min) >= 0)
			return 1;
		return 0;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->max, right->min) > 0)
			return 1;
		return 0;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(left->min, left->max) != 0)
			return 1;
		if (sval_cmp(right->min, right->max) != 0)
			return 1;
		if (sval_cmp(left->min, right->min) != 0)
			return 1;
		return 0;
	default:
		sm_msg("unhandled comparison %d\n", comparison);
		return 0;
	}
	return 0;
}

int true_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return true_comparison_range(var, comparison, val);
	else
		return true_comparison_range(val, comparison, var);
}

static int false_comparison_range_sval(struct data_range *left, int comparison, struct data_range *right)
{
	switch (comparison) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		if (sval_cmp(left->max, right->min) >= 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_LTE:
	case SPECIAL_LTE:
		if (sval_cmp(left->max, right->min) > 0)
			return 1;
		return 0;
	case SPECIAL_EQUAL:
		if (sval_cmp(left->min, left->max) != 0)
			return 1;
		if (sval_cmp(right->min, right->max) != 0)
			return 1;
		if (sval_cmp(left->min, right->min) != 0)
			return 1;
		return 0;
	case SPECIAL_UNSIGNED_GTE:
	case SPECIAL_GTE:
		if (sval_cmp(left->min, right->max) < 0)
			return 1;
		return 0;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		if (sval_cmp(left->min, right->max) <= 0)
			return 1;
		return 0;
	case SPECIAL_NOTEQUAL:
		if (sval_cmp(left->max, right->min) < 0)
			return 0;
		if (sval_cmp(left->min, right->max) > 0)
			return 0;
		return 1;
	default:
		sm_msg("unhandled comparison %d\n", comparison);
		return 0;
	}
	return 0;
}

int false_comparison_range_LR(int comparison, struct data_range *var, struct data_range *val, int left)
{
	if (left)
		return false_comparison_range_sval(var, comparison, val);
	else
		return false_comparison_range_sval(val, comparison, var);
}

int possibly_true(struct expression *left, int comparison, struct expression *right)
{
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;
	struct symbol *type;

	if (!get_implied_rl(left, &rl_left))
		return 1;
	if (!get_implied_rl(right, &rl_right))
		return 1;

	type = rl_type(rl_left);
	if (type_positive_bits(type) < type_positive_bits(rl_type(rl_right)))
		type = rl_type(rl_right);
	if (type_positive_bits(type) < 31)
		type = &int_ctype;

	rl_left = cast_rl(type, rl_left);
	rl_right = cast_rl(type, rl_right);

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (true_comparison_range(tmp_left, comparison, tmp_right))
				return 1;
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
	return 0;
}

int possibly_false(struct expression *left, int comparison, struct expression *right)
{
	struct range_list *rl_left, *rl_right;
	struct data_range *tmp_left, *tmp_right;
	struct symbol *type;

	if (!get_implied_rl(left, &rl_left))
		return 1;
	if (!get_implied_rl(right, &rl_right))
		return 1;

	type = rl_type(rl_left);
	if (type_positive_bits(type) < type_positive_bits(rl_type(rl_right)))
		type = rl_type(rl_right);
	if (type_positive_bits(type) < 31)
		type = &int_ctype;

	rl_left = cast_rl(type, rl_left);
	rl_right = cast_rl(type, rl_right);

	FOR_EACH_PTR(rl_left, tmp_left) {
		FOR_EACH_PTR(rl_right, tmp_right) {
			if (false_comparison_range_sval(tmp_left, comparison, tmp_right))
				return 1;
		} END_FOR_EACH_PTR(tmp_right);
	} END_FOR_EACH_PTR(tmp_left);
	return 0;
}

int possibly_true_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
	struct data_range *left_tmp, *right_tmp;
	struct symbol *type;

	if (!left_ranges || !right_ranges)
		return 1;

	type = rl_type(left_ranges);
	if (type_positive_bits(type) < type_positive_bits(rl_type(right_ranges)))
		type = rl_type(right_ranges);
	if (type_positive_bits(type) < 31)
		type = &int_ctype;

	left_ranges = cast_rl(type, left_ranges);
	right_ranges = cast_rl(type, right_ranges);

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (true_comparison_range(left_tmp, comparison, right_tmp))
				return 1;
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
	return 0;
}

int possibly_false_rl(struct range_list *left_ranges, int comparison, struct range_list *right_ranges)
{
	struct data_range *left_tmp, *right_tmp;
	struct symbol *type;

	if (!left_ranges || !right_ranges)
		return 1;

	type = rl_type(left_ranges);
	if (type_positive_bits(type) < type_positive_bits(rl_type(right_ranges)))
		type = rl_type(right_ranges);
	if (type_positive_bits(type) < 31)
		type = &int_ctype;

	left_ranges = cast_rl(type, left_ranges);
	right_ranges = cast_rl(type, right_ranges);

	FOR_EACH_PTR(left_ranges, left_tmp) {
		FOR_EACH_PTR(right_ranges, right_tmp) {
			if (false_comparison_range_sval(left_tmp, comparison, right_tmp))
				return 1;
		} END_FOR_EACH_PTR(right_tmp);
	} END_FOR_EACH_PTR(left_tmp);
	return 0;
}

/* FIXME: the _rl here stands for right left so really it should be _lr */
int possibly_true_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_true_rl(a, comparison, b);
	else
		return possibly_true_rl(b, comparison, a);
}

int possibly_false_rl_LR(int comparison, struct range_list *a, struct range_list *b, int left)
{
	if (left)
		return possibly_false_rl(a, comparison, b);
	else
		return possibly_false_rl(b, comparison, a);
}

int rl_has_sval(struct range_list *rl, sval_t sval)
{
	struct data_range *tmp;

	FOR_EACH_PTR(rl, tmp) {
		if (sval_cmp(tmp->min, sval) <= 0 &&
		    sval_cmp(tmp->max, sval) >= 0)
			return 1;
	} END_FOR_EACH_PTR(tmp);
	return 0;
}

void tack_on(struct range_list **list, struct data_range *drange)
{
	add_ptr_list(list, drange);
}

void push_rl(struct range_list_stack **rl_stack, struct range_list *rl)
{
	add_ptr_list(rl_stack, rl);
}

struct range_list *pop_rl(struct range_list_stack **rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)*rl_stack);
	delete_ptr_list_last((struct ptr_list **)rl_stack);
	return rl;
}

struct range_list *top_rl(struct range_list_stack *rl_stack)
{
	struct range_list *rl;

	rl = last_ptr_list((struct ptr_list *)rl_stack);
	return rl;
}

void filter_top_rl(struct range_list_stack **rl_stack, struct range_list *filter)
{
	struct range_list *rl;

	rl = pop_rl(rl_stack);
	rl = rl_filter(rl, filter);
	push_rl(rl_stack, rl);
}

struct range_list *rl_truncate_cast(struct symbol *type, struct range_list *rl)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;
	sval_t min, max;

	if (!rl)
		return NULL;

	if (!type || type == rl_type(rl))
		return rl;

	FOR_EACH_PTR(rl, tmp) {
		min = tmp->min;
		max = tmp->max;
		if (type_bits(type) < type_bits(rl_type(rl))) {
			min.uvalue = tmp->min.uvalue & ((1ULL << type_bits(type)) - 1);
			max.uvalue = tmp->max.uvalue & ((1ULL << type_bits(type)) - 1);
		}
		if (sval_cmp(min, max) > 0) {
			min = sval_cast(type, min);
			max = sval_cast(type, max);
		}
		add_range_t(type, &ret, min, max);
	} END_FOR_EACH_PTR(tmp);

	return ret;
}

static int rl_is_sane(struct range_list *rl)
{
	struct data_range *tmp;
	struct symbol *type;

	type = rl_type(rl);
	FOR_EACH_PTR(rl, tmp) {
		if (!sval_fits(type, tmp->min))
			return 0;
		if (!sval_fits(type, tmp->max))
			return 0;
		if (sval_cmp(tmp->min, tmp->max) > 0)
			return 0;
	} END_FOR_EACH_PTR(tmp);

	return 1;
}

static int rl_type_consistent(struct range_list *rl)
{
	struct data_range *tmp;
	struct symbol *type;

	type = rl_type(rl);
	FOR_EACH_PTR(rl, tmp) {
		if (type != tmp->min.type || type != tmp->max.type)
			return 0;
	} END_FOR_EACH_PTR(tmp);
	return 1;
}

static struct range_list *cast_to_bool(struct range_list *rl)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;
	int has_one = 0;
	int has_zero = 0;
	sval_t min = { .type = &bool_ctype };
	sval_t max = { .type = &bool_ctype };

	FOR_EACH_PTR(rl, tmp) {
		if (tmp->min.value || tmp->max.value)
			has_one = 1;
		if (sval_is_negative(tmp->min) &&
		    sval_is_negative(tmp->max))
			continue;
		if (tmp->min.value == 0 ||
		    tmp->max.value == 0)
			has_zero = 1;
		if (sval_is_negative(tmp->min) &&
		    tmp->max.value > 0)
			has_zero = 1;
	} END_FOR_EACH_PTR(tmp);

	if (!has_zero)
		min.value = 1;
	if (has_one)
		max.value = 1;

	add_range(&ret, min, max);
	return ret;
}

struct range_list *cast_rl(struct symbol *type, struct range_list *rl)
{
	struct data_range *tmp;
	struct range_list *ret = NULL;

	if (!rl)
		return NULL;
	if (!type)
		return rl;
	if (!rl_is_sane(rl))
		return alloc_whole_rl(type);
	if (type == rl_type(rl) && rl_type_consistent(rl))
		return rl;

	if (type == &bool_ctype)
		return cast_to_bool(rl);

	FOR_EACH_PTR(rl, tmp) {
		add_range_t(type, &ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	if (!ret)
		return alloc_whole_rl(type);

	return ret;
}

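/*
 * Illustrative sketch, not part of the original file: casting the int list
 * (-1)-(-1) to an unsigned 32 bit type turns the negative value into the
 * corresponding high positive value (u32max), which is the add_range_t()
 * behaviour cast_rl() relies on above.  Assumes sparse's uint_ctype.
 */
static void cast_rl_example(void)
{
	struct range_list *rl = NULL;

	add_range(&rl, sval_type_val(&int_ctype, -1), sval_type_val(&int_ctype, -1));
	rl = cast_rl(&uint_ctype, rl);
	sm_msg("as unsigned: %s", show_rl(rl));
}
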
struct range_list *rl_invert(struct range_list *orig)
{
	struct range_list *ret = NULL;
	struct data_range *tmp;
	sval_t gap_min, abs_max, sval;

	if (!orig)
		return NULL;
	if (type_bits(rl_type(orig)) < 0)  /* void type mostly */
		return NULL;

	gap_min = sval_type_min(rl_min(orig).type);
	abs_max = sval_type_max(rl_max(orig).type);

	FOR_EACH_PTR(orig, tmp) {
		if (sval_cmp(tmp->min, gap_min) > 0) {
			sval = sval_type_val(tmp->min.type, tmp->min.value - 1);
			add_range(&ret, gap_min, sval);
		}
		if (sval_cmp(tmp->max, abs_max) == 0)
			return ret;
		gap_min = sval_type_val(tmp->max.type, tmp->max.value + 1);
	} END_FOR_EACH_PTR(tmp);

	if (sval_cmp(gap_min, abs_max) <= 0)
		add_range(&ret, gap_min, abs_max);

	return ret;
}

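/*
 * Illustrative sketch, not part of the original file: inverting an int list
 * yields the gaps, so the complement of 0-10 is s32min-(-1),11-s32max.
 */
static void rl_invert_example(void)
{
	struct range_list *rl = NULL;

	add_range(&rl, sval_type_val(&int_ctype, 0), sval_type_val(&int_ctype, 10));
	sm_msg("inverted: %s", show_rl(rl_invert(rl)));
}
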
struct range_list *rl_filter(struct range_list *rl, struct range_list *filter)
{
	struct data_range *tmp;

	FOR_EACH_PTR(filter, tmp) {
		rl = remove_range(rl, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	return rl;
}

struct range_list *rl_intersection(struct range_list *one, struct range_list *two)
{
	struct range_list *one_orig;
	struct range_list *two_orig;
	struct range_list *ret;
	struct symbol *ret_type;
	struct symbol *small_type;
	struct symbol *large_type;

	if (!one || !two)
		return NULL;

	one_orig = one;
	two_orig = two;

	ret_type = rl_type(one);
	small_type = rl_type(one);
	large_type = rl_type(two);

	if (type_bits(rl_type(two)) < type_bits(small_type)) {
		small_type = rl_type(two);
		large_type = rl_type(one);
	}

	one = cast_rl(large_type, one);
	two = cast_rl(large_type, two);

	ret = one;
	one = rl_invert(one);
	two = rl_invert(two);

	ret = rl_filter(ret, one);
	ret = rl_filter(ret, two);

	one = cast_rl(small_type, one_orig);
	two = cast_rl(small_type, two_orig);

	one = rl_invert(one);
	two = rl_invert(two);

	ret = cast_rl(small_type, ret);
	ret = rl_filter(ret, one);
	ret = rl_filter(ret, two);

	return cast_rl(ret_type, ret);
}

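/*
 * Illustrative sketch, not part of the original file: rl_intersection() is
 * built out of rl_invert() and rl_filter() above, so intersecting 0-10 with
 * 5-100 leaves only the overlap.
 */
static void rl_intersection_example(void)
{
	struct range_list *one = NULL;
	struct range_list *two = NULL;

	add_range(&one, sval_type_val(&int_ctype, 0), sval_type_val(&int_ctype, 10));
	add_range(&two, sval_type_val(&int_ctype, 5), sval_type_val(&int_ctype, 100));
	sm_msg("intersection: %s", show_rl(rl_intersection(one, two)));	/* expected "5-10" */
}
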
static struct range_list *handle_mod_rl(struct range_list *left, struct range_list *right)
{
	sval_t zero;
	sval_t max;

	max = rl_max(right);
	if (sval_is_max(max))
		return left;
	if (max.value == 0)
		return NULL;
	max.value--;
	if (sval_is_negative(max))
		return NULL;
	if (sval_cmp(rl_max(left), max) < 0)
		return left;
	zero = max;
	zero.value = 0;
	return alloc_rl(zero, max);
}

static struct range_list *get_neg_rl(struct range_list *rl)
{
	struct data_range *tmp;
	struct data_range *new;
	struct range_list *ret = NULL;

	if (!rl)
		return NULL;
	if (sval_is_positive(rl_min(rl)))
		return NULL;

	FOR_EACH_PTR(rl, tmp) {
		if (sval_is_positive(tmp->min))
			break;
		if (sval_is_positive(tmp->max)) {
			new = alloc_range(tmp->min, tmp->max);
			new->max.value = -1;
			add_range(&ret, new->min, new->max);
			break;
		}
		add_range(&ret, tmp->min, tmp->max);
	} END_FOR_EACH_PTR(tmp);

	return ret;
}

static struct range_list *get_pos_rl(struct range_list *rl)
{
	struct data_range *tmp;
	struct data_range *new;
	struct range_list *ret = NULL;

	if (!rl)
		return NULL;
	if (sval_is_negative(rl_max(rl)))
		return NULL;

	FOR_EACH_PTR(rl, tmp) {
		if (sval_is_negative(tmp->max))
			continue;
		if (sval_is_positive(tmp->min)) {
			add_range(&ret, tmp->min, tmp->max);
			continue;
		}
		new = alloc_range(tmp->min, tmp->max);
		new->min.value = 0;
		add_range(&ret, new->min, new->max);
	} END_FOR_EACH_PTR(tmp);

	return ret;
}

static struct range_list *divide_rl_helper(struct range_list *left, struct range_list *right)
{
	sval_t right_min, right_max;
	sval_t min, max;

	if (!left || !right)
		return NULL;

	/* let's assume we never divide by zero */
	right_min = rl_min(right);
	right_max = rl_max(right);
	if (right_min.value == 0 && right_max.value == 0)
		return NULL;
	if (right_min.value == 0)
		right_min.value = 1;
	if (right_max.value == 0)
		right_max.value = -1;

	max = sval_binop(rl_max(left), '/', right_min);
	min = sval_binop(rl_min(left), '/', right_max);

	return alloc_rl(min, max);
}

static struct range_list *handle_divide_rl(struct range_list *left, struct range_list *right)
{
	struct range_list *left_neg, *left_pos, *right_neg, *right_pos;
	struct range_list *neg_neg, *neg_pos, *pos_neg, *pos_pos;
	struct range_list *ret;

	if (is_whole_rl(right))
		return NULL;

	left_neg = get_neg_rl(left);
	left_pos = get_pos_rl(left);
	right_neg = get_neg_rl(right);
	right_pos = get_pos_rl(right);

	neg_neg = divide_rl_helper(left_neg, right_neg);
	neg_pos = divide_rl_helper(left_neg, right_pos);
	pos_neg = divide_rl_helper(left_pos, right_neg);
	pos_pos = divide_rl_helper(left_pos, right_pos);

	ret = rl_union(neg_neg, neg_pos);
	ret = rl_union(ret, pos_neg);
	return rl_union(ret, pos_pos);
}

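/*
 * Illustrative sketch, not part of the original file: division splits both
 * sides into negative and positive parts so the four sign combinations can
 * be bounded separately.  For the all-positive case, dividing 10-100 by 2-10
 * is expected to give 1-50 (smallest value over the biggest divisor up to
 * the biggest value over the smallest divisor).
 */
static void divide_rl_example(void)
{
	struct range_list *left = NULL;
	struct range_list *right = NULL;

	add_range(&left, sval_type_val(&int_ctype, 10), sval_type_val(&int_ctype, 100));
	add_range(&right, sval_type_val(&int_ctype, 2), sval_type_val(&int_ctype, 10));
	sm_msg("divided: %s", show_rl(rl_binop(left, '/', right)));
}
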
static struct range_list *handle_add_mult_rl(struct range_list *left, int op, struct range_list *right)
{
	sval_t min, max;

	if (sval_binop_overflows(rl_min(left), op, rl_min(right)))
		return NULL;
	min = sval_binop(rl_min(left), op, rl_min(right));

	if (sval_binop_overflows(rl_max(left), op, rl_max(right)))
		return NULL;
	max = sval_binop(rl_max(left), op, rl_max(right));

	return alloc_rl(min, max);
}

static unsigned long long rl_bits_always_set(struct range_list *rl)
{
	return sval_fls_mask(rl_min(rl));
}

static unsigned long long rl_bits_maybe_set(struct range_list *rl)
{
	return sval_fls_mask(rl_max(rl));
}

static struct range_list *handle_OR_rl(struct range_list *left, struct range_list *right)
{
	unsigned long long left_min, left_max, right_min, right_max;
	sval_t min, max;
	sval_t sval;

	if ((rl_to_sval(left, &sval) || rl_to_sval(right, &sval)) &&
	    !sval_binop_overflows(rl_max(left), '+', rl_max(right)))
		return rl_binop(left, '+', right);

	left_min = rl_bits_always_set(left);
	left_max = rl_bits_maybe_set(left);
	right_min = rl_bits_always_set(right);
	right_max = rl_bits_maybe_set(right);

	min.type = max.type = &ullong_ctype;
	min.uvalue = left_min | right_min;
	max.uvalue = left_max | right_max;

	return cast_rl(rl_type(left), alloc_rl(min, max));
}

static struct range_list *handle_XOR_rl(struct range_list *left, struct range_list *right)
{
	unsigned long long left_set, left_maybe;
	unsigned long long right_set, right_maybe;
	sval_t zero, max;

	left_set = rl_bits_always_set(left);
	left_maybe = rl_bits_maybe_set(left);

	right_set = rl_bits_always_set(right);
	right_maybe = rl_bits_maybe_set(right);

	zero = max = rl_min(left);
	zero.uvalue = 0;
	max.uvalue = fls_mask((left_maybe | right_maybe) ^ (left_set & right_set));

	return cast_rl(rl_type(left), alloc_rl(zero, max));
}

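/*
 * Illustrative sketch, not part of the original file: for '|' and '^' the
 * result is approximated with bit masks built from the minimum and maximum
 * values, so ORing 0-4 with 0-8 is expected to give 0-15 (every bit that
 * could possibly be set) rather than an exact value set.
 */
static void bitwise_rl_example(void)
{
	struct range_list *left = NULL;
	struct range_list *right = NULL;

	add_range(&left, sval_type_val(&int_ctype, 0), sval_type_val(&int_ctype, 4));
	add_range(&right, sval_type_val(&int_ctype, 0), sval_type_val(&int_ctype, 8));
	sm_msg("ORed: %s", show_rl(rl_binop(left, '|', right)));
}
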
struct range_list *rl_binop(struct range_list *left, int op, struct range_list *right)
{
	struct symbol *cast_type;
	sval_t left_sval, right_sval;
	struct range_list *ret = NULL;

	cast_type = rl_type(left);
	if (sval_type_max(rl_type(left)).uvalue < sval_type_max(rl_type(right)).uvalue)
		cast_type = rl_type(right);
	if (sval_type_max(cast_type).uvalue < INT_MAX)
		cast_type = &int_ctype;

	left = cast_rl(cast_type, left);
	right = cast_rl(cast_type, right);

	if (!left || !right)
		return alloc_whole_rl(cast_type);

	if (rl_to_sval(left, &left_sval) && rl_to_sval(right, &right_sval)) {
		sval_t val = sval_binop(left_sval, op, right_sval);
		return alloc_rl(val, val);
	}

	switch (op) {
	case '%':
		ret = handle_mod_rl(left, right);
		break;
	case '/':
		ret = handle_divide_rl(left, right);
		break;
	case '*':
	case '+':
		ret = handle_add_mult_rl(left, op, right);
		break;
	case '|':
		ret = handle_OR_rl(left, right);
		break;
	case '^':
		ret = handle_XOR_rl(left, right);
		break;

	/* FIXME:  Do the rest as well */
	case '-':
	case '&':
	case SPECIAL_RIGHTSHIFT:
	case SPECIAL_LEFTSHIFT:
		break;
	}

	if (!ret)
		ret = alloc_whole_rl(cast_type);
	return ret;
}

void free_rl(struct range_list **rlist)
{
	__free_ptr_list((struct ptr_list **)rlist);
}

static void free_single_dinfo(struct data_info *dinfo)
{
	free_rl(&dinfo->value_ranges);
}

static void free_dinfos(struct allocation_blob *blob)
{
	unsigned int size = sizeof(struct data_info);
	unsigned int offset = 0;

	while (offset < blob->offset) {
		free_single_dinfo((struct data_info *)(blob->data + offset));
		offset += size;
	}
}

void free_data_info_allocs(void)
{
	struct allocator_struct *desc = &data_info_allocator;
	struct allocation_blob *blob = desc->blobs;

	desc->blobs = NULL;
	desc->allocations = 0;
	desc->total_bytes = 0;
	desc->useful_bytes = 0;
	desc->freelist = NULL;
	while (blob) {
		struct allocation_blob *next = blob->next;

		free_dinfos(blob);
		blob_free(blob, desc->chunking);
		blob = next;
	}
	clear_data_range_alloc();
}

void split_comparison_rl(struct range_list *left_orig, int op, struct range_list *right_orig,
		struct range_list **left_true_rl, struct range_list **left_false_rl,
		struct range_list **right_true_rl, struct range_list **right_false_rl)
{
	struct range_list *left_true, *left_false;
	struct range_list *right_true, *right_false;
	sval_t min, max;

	min = sval_type_min(rl_type(left_orig));
	max = sval_type_max(rl_type(left_orig));

	left_true = clone_rl(left_orig);
	left_false = clone_rl(left_orig);
	right_true = clone_rl(right_orig);
	right_false = clone_rl(right_orig);

	switch (op) {
	case '<':
	case SPECIAL_UNSIGNED_LT:
		left_true = remove_range(left_orig, rl_max(right_orig), max);
		if (!sval_is_min(rl_min(right_orig))) {
			left_false = remove_range(left_orig, min, sub_one(rl_min(right_orig)));
		}

		right_true = remove_range(right_orig, min, rl_min(left_orig));
		if (!sval_is_max(rl_max(left_orig)))
			right_false = remove_range(right_orig, add_one(rl_max(left_orig)), max);
		break;
	case SPECIAL_UNSIGNED_LTE:
	case SPECIAL_LTE:
		if (!sval_is_max(rl_max(right_orig)))
			left_true = remove_range(left_orig, add_one(rl_max(right_orig)), max);
		left_false = remove_range(left_orig, min, rl_min(right_orig));

		if (!sval_is_min(rl_min(left_orig)))
			right_true = remove_range(right_orig, min, sub_one(rl_min(left_orig)));
		right_false = remove_range(right_orig, rl_max(left_orig), max);

		if (sval_cmp(rl_min(left_orig), rl_min(right_orig)) == 0)
			left_false = remove_range(left_false, rl_min(left_orig), rl_min(left_orig));
		if (sval_cmp(rl_max(left_orig), rl_max(right_orig)) == 0)
			right_false = remove_range(right_false, rl_max(left_orig), rl_max(left_orig));
		break;
	case SPECIAL_EQUAL:
		if (!sval_is_max(rl_max(right_orig))) {
			left_true = remove_range(left_true, add_one(rl_max(right_orig)), max);
		}
		if (!sval_is_min(rl_min(right_orig))) {
			left_true = remove_range(left_true, min, sub_one(rl_min(right_orig)));
		}
		if (sval_cmp(rl_min(right_orig), rl_max(right_orig)) == 0)
			left_false = remove_range(left_orig, rl_min(right_orig), rl_min(right_orig));

		if (!sval_is_max(rl_max(left_orig)))
			right_true = remove_range(right_true, add_one(rl_max(left_orig)), max);
		if (!sval_is_min(rl_min(left_orig)))
			right_true = remove_range(right_true, min, sub_one(rl_min(left_orig)));
		if (sval_cmp(rl_min(left_orig), rl_max(left_orig)) == 0)
			right_false = remove_range(right_orig, rl_min(left_orig), rl_min(left_orig));
		break;
	case SPECIAL_UNSIGNED_GTE:
	case SPECIAL_GTE:
		if (!sval_is_min(rl_min(right_orig)))
			left_true = remove_range(left_orig, min, sub_one(rl_min(right_orig)));
		left_false = remove_range(left_orig, rl_max(right_orig), max);

		if (!sval_is_max(rl_max(left_orig)))
			right_true = remove_range(right_orig, add_one(rl_max(left_orig)), max);
		right_false = remove_range(right_orig, min, rl_min(left_orig));

		if (sval_cmp(rl_min(left_orig), rl_min(right_orig)) == 0)
			right_false = remove_range(right_false, rl_min(left_orig), rl_min(left_orig));
		if (sval_cmp(rl_max(left_orig), rl_max(right_orig)) == 0)
			left_false = remove_range(left_false, rl_max(left_orig), rl_max(left_orig));
		break;
	case '>':
	case SPECIAL_UNSIGNED_GT:
		left_true = remove_range(left_orig, min, rl_min(right_orig));
		if (!sval_is_max(rl_max(right_orig)))
			left_false = remove_range(left_orig, add_one(rl_max(right_orig)), max);

		right_true = remove_range(right_orig, rl_max(left_orig), max);
		if (!sval_is_min(rl_min(left_orig)))
			right_false = remove_range(right_orig, min, sub_one(rl_min(left_orig)));
		break;
	case SPECIAL_NOTEQUAL:
		if (!sval_is_max(rl_max(right_orig)))
			left_false = remove_range(left_false, add_one(rl_max(right_orig)), max);
		if (!sval_is_min(rl_min(right_orig)))
			left_false = remove_range(left_false, min, sub_one(rl_min(right_orig)));
		if (sval_cmp(rl_min(right_orig), rl_max(right_orig)) == 0)
			left_true = remove_range(left_orig, rl_min(right_orig), rl_min(right_orig));

		if (!sval_is_max(rl_max(left_orig)))
			right_false = remove_range(right_false, add_one(rl_max(left_orig)), max);
		if (!sval_is_min(rl_min(left_orig)))
			right_false = remove_range(right_false, min, sub_one(rl_min(left_orig)));
		if (sval_cmp(rl_min(left_orig), rl_max(left_orig)) == 0)
			right_true = remove_range(right_orig, rl_min(left_orig), rl_min(left_orig));
		break;
	default:
		sm_msg("internal error: unhandled comparison %d", op);
		return;
	}

	if (left_true_rl) {
		*left_true_rl = left_true;
		*left_false_rl = left_false;
	}
	if (right_true_rl) {
		*right_true_rl = right_true;
		*right_false_rl = right_false;
	}
}
