 * Copyright (C) 2010 Dan Carpenter.
 *
 * This program is free software; you can redistribute it and/or
 * modify it under the terms of the GNU General Public License
 * as published by the Free Software Foundation; either version 2
 * of the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
 * GNU General Public License for more details.
 *
 * You should have received a copy of the GNU General Public License
 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
 */

#include "smatch_slist.h"
#include "smatch_extra.h"
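
/*
 * Note on the "implied" argument threaded through this file.  This is a rough
 * sketch based on how the handlers below treat each mode, not an authoritative
 * definition: RL_EXACT only uses compile time constants; RL_IMPLIED and
 * RL_HARD use the tracked smatch_extra state (RL_HARD additionally insists on
 * bounds that are known to be hard limits); RL_ABSOLUTE and RL_REAL_ABSOLUTE
 * fall back to the whole type range when nothing better is known; RL_FUZZY
 * allows heuristic bounds.
 */
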
static bool get_rl_sval(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *sval_res);
static bool get_rl_internal(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res);
static bool handle_variable(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval);
static struct range_list *(*custom_handle_variable)(struct expression *expr);

static bool get_implied_value_internal(struct expression *expr, int *recurse_cnt, sval_t *res_sval);
static int get_absolute_rl_internal(struct expression *expr, struct range_list **rl, int *recurse_cnt);

static sval_t zero = {.type = &int_ctype, {.value = 0} };
static sval_t one  = {.type = &int_ctype, {.value = 1} };

static int fast_math_only;

struct range_list *rl_zero(void)
{
	static struct range_list *zero_perm;

	if (!zero_perm)
		zero_perm = clone_rl_permanent(alloc_rl(zero, zero));
	return zero_perm;
}

struct range_list *rl_one(void)
{
	static struct range_list *one_perm;

	if (!one_perm)
		one_perm = clone_rl_permanent(alloc_rl(one, one));
	return one_perm;
}

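/*
 * A GCC statement expression such as ({ foo(); bar; }) evaluates to its last
 * statement, so last_stmt_rl() pulls out that final expression (possibly
 * hidden behind a label) and asks get_rl_sval() for its range.
 */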
static bool last_stmt_rl(struct statement *stmt, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct expression *expr;

	if (!stmt)
		return false;

	stmt = last_ptr_list((struct ptr_list *)stmt->stmts);
	if (stmt->type == STMT_LABEL) {
		if (stmt->label_statement &&
		    stmt->label_statement->type == STMT_EXPRESSION)
			expr = stmt->label_statement->expression;
		else
			return false;
	} else if (stmt->type == STMT_EXPRESSION) {
		expr = stmt->expression;
	} else {
		return false;
	}
	return get_rl_sval(expr, implied, recurse_cnt, res, res_sval);
}

static bool handle_expression_statement_rl(struct expression *expr, int implied,
		int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	return last_stmt_rl(get_expression_statement(expr), implied, recurse_cnt, res, res_sval);
}

static bool handle_address(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct range_list *rl;
	sval_t sval;

	if (implied == RL_EXACT)
		return false;

	if (custom_handle_variable) {
		rl = custom_handle_variable(expr);
		if (rl) {
			*res = rl;
			return true;
		}
	}

	if (get_mtag_sval(expr, &sval)) {
		*res_sval = sval;
		return true;
	}

	if (get_address_rl(expr, res))
		return true;

	return false;
}

static bool handle_ampersand_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	return handle_address(expr, implied, recurse_cnt, res, res_sval);
}

static bool handle_negate_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	if (known_condition_true(expr->unop)) {
		*res_sval = zero;
		return true;
	}
	if (known_condition_false(expr->unop)) {
		*res_sval = one;
		return true;
	}

	if (implied == RL_EXACT)
		return false;

	if (implied_condition_true(expr->unop)) {
		*res_sval = zero;
		return true;
	}
	if (implied_condition_false(expr->unop)) {
		*res_sval = one;
		return true;
	}

	*res = alloc_rl(zero, one);
	return true;
}

static bool handle_bitwise_negate(struct expression *expr, int implied, int *recurse_cnt, sval_t *res_sval)
{
	struct range_list *rl;
	sval_t sval = {};

	if (!get_rl_sval(expr->unop, implied, recurse_cnt, &rl, &sval))
		return false;
	if (!sval.type && !rl_to_sval(rl, &sval))
		return false;
	sval = sval_preop(sval, '~');
	sval = sval_cast(get_type(expr->unop), sval);
	*res_sval = sval;
	return true;
}

static bool untrusted_type_min(struct expression *expr)
{
	struct range_list *rl;

	rl = var_user_rl(expr);
	return rl && sval_is_min(rl_min(rl));
}

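/*
 * Negating a range list flips each sub-range: [a,b] becomes [-b,-a].  As an
 * illustration (not taken from real output), if rl is "1-10" then -rl is
 * "(-10)-(-1)".  The code below handles the negative and non-negative halves
 * of the input separately, and keeps INT_MIN around when the value is
 * untrusted because -INT_MIN overflows back to INT_MIN.
 */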
static bool handle_minus_preop(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct range_list *rl;
	struct range_list *ret = NULL;
	struct symbol *type;
	sval_t neg_one = { .value = -1 };
	sval_t zero = { .value = 0 };
	sval_t sval = {};

	if (!get_rl_sval(expr->unop, implied, recurse_cnt, &rl, &sval))
		return false;
	if (sval.type) {
		*res_sval = sval_preop(sval, '-');
		return true;
	}

	/*
	 * One complication is that -INT_MIN is still INT_MIN because of integer
	 * overflows...  But how many times do we set a time out to INT_MIN?
	 * So normally when we call abs() then it does return a positive value.
	 */
	type = rl_type(rl);
	neg_one.type = zero.type = type;

	if (sval_is_negative(rl_min(rl))) {
		struct range_list *neg;
		struct data_range *drange;
		sval_t new_min, new_max;

		neg = alloc_rl(sval_type_min(type), neg_one);
		neg = rl_intersection(rl, neg);

		if (sval_is_min(rl_min(neg)) && !sval_is_min(rl_max(neg)))
			neg = remove_range(neg, sval_type_min(type), sval_type_min(type));

		FOR_EACH_PTR(neg, drange) {
			new_min = drange->max;
			new_min.value = -new_min.value;
			new_max = drange->min;
			new_max.value = -new_max.value;
			add_range(&ret, new_min, new_max);
		} END_FOR_EACH_PTR(drange);

		if (untrusted_type_min(expr))
			add_range(&ret, sval_type_min(type), sval_type_min(type));
	}

	if (!sval_is_negative(rl_max(rl))) {
		struct range_list *pos;
		struct data_range *drange;
		sval_t new_min, new_max;

		pos = alloc_rl(zero, sval_type_max(type));
		pos = rl_intersection(rl, pos);

		FOR_EACH_PTR(pos, drange) {
			new_min = drange->max;
			new_min.value = -new_min.value;
			new_max = drange->min;
			new_max.value = -new_max.value;
			add_range(&ret, new_min, new_max);
		} END_FOR_EACH_PTR(drange);
	}

	*res = ret;
	return true;
}

static bool handle_preop_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	switch (expr->op) {
	case '&':
		return handle_ampersand_rl(expr, implied, recurse_cnt, res, res_sval);
	case '!':
		return handle_negate_rl(expr, implied, recurse_cnt, res, res_sval);
	case '~':
		return handle_bitwise_negate(expr, implied, recurse_cnt, res_sval);
	case '-':
		return handle_minus_preop(expr, implied, recurse_cnt, res, res_sval);
	case SPECIAL_DECREMENT:
	case SPECIAL_INCREMENT:
		return handle_variable(expr, implied, recurse_cnt, res, res_sval);
	case '(':
		return handle_expression_statement_rl(expr, implied, recurse_cnt, res, res_sval);
	default:
		return false;
	}
}

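/*
 * The binop handlers below deliberately give up (return false) rather than
 * guess: handle_divide_rl() refuses whole/unknown operands except in
 * RL_REAL_ABSOLUTE mode, and the generic path in handle_binop_rl_helper()
 * bails out whenever the min or max computation would overflow.
 */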
static bool handle_divide_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res)
{
	struct range_list *left_rl = NULL;
	struct range_list *right_rl = NULL;
	struct symbol *type;

	type = get_type(expr);

	get_rl_internal(expr->left, implied, recurse_cnt, &left_rl);
	left_rl = cast_rl(type, left_rl);
	get_rl_internal(expr->right, implied, recurse_cnt, &right_rl);
	right_rl = cast_rl(type, right_rl);

	if (!left_rl || !right_rl)
		return false;

	if (implied != RL_REAL_ABSOLUTE) {
		if (is_whole_rl(left_rl) || is_whole_rl(right_rl))
			return false;
	}

	*res = rl_binop(left_rl, '/', right_rl);
	return true;
}

static int handle_offset_subtraction(struct expression *expr)
{
	struct expression *left, *right;
	struct symbol *left_sym, *right_sym;
	struct symbol *type;
	int left_offset, right_offset;

	type = get_type(expr);
	if (!type || type->type != SYM_PTR)
		return -1;
	type = get_real_base_type(type);
	if (!type || (type_bits(type) != 8 && (type != &void_ctype)))
		return -1;

	left = strip_expr(expr->left);
	right = strip_expr(expr->right);

	if (left->type != EXPR_PREOP || left->op != '&')
		return -1;
	left = strip_expr(left->unop);

	left_sym = expr_to_sym(left);
	right_sym = expr_to_sym(right);
	if (!left_sym || left_sym != right_sym)
		return -1;

	left_offset = get_member_offset_from_deref(left);
	if (right->type == EXPR_SYMBOL) {
		right_offset = 0;
	} else {
		if (right->type != EXPR_PREOP || right->op != '&')
			return -1;
		right = strip_expr(right->unop);
		right_offset = get_member_offset_from_deref(right);
	}
	if (left_offset < 0 || right_offset < 0)
		return -1;

	return left_offset - right_offset;
}

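/*
 * In other words, for byte sized (or void) pointer math such as
 * "&foo->member_b - &foo->member_a", where both sides refer to the same
 * object, the result is simply the difference of the two member offsets.
 * The struct and member names here are illustrative, not taken from real
 * code.
 */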
static bool max_is_unknown_max(struct range_list *rl)
{
	/*
	 * The issue with this code is that we had:
	 *	if (foo > 1) return 1 - foo;
	 * Ideally we would say that returns s32min-(-1) but what Smatch
	 * was saying was that the lowest possible value was "1 - INT_MAX"
	 *
	 * My solution is to ignore max values for int or larger.  I keep
	 * the max for shorts etc, because those might be worthwhile.
	 *
	 * The problem with just returning 1 - INT_MAX is that that is
	 * treated as useful information but s32min is treated as basically
	 * unknown.
	 */
	if (type_bits(rl_type(rl)) < 31)
		return false;
	return sval_is_max(rl_max(rl));
}

static bool handle_add_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res)
{
	struct range_list *left_rl = NULL;
	struct range_list *right_rl = NULL;
	struct range_list *valid;
	struct symbol *type;
	sval_t min, max;

	type = get_type(expr);

	get_rl_internal(expr->left, implied, recurse_cnt, &left_rl);
	left_rl = cast_rl(type, left_rl);
	get_rl_internal(expr->right, implied, recurse_cnt, &right_rl);
	right_rl = cast_rl(type, right_rl);

	if (!left_rl || !right_rl)
		return false;

	if (type_is_ptr(type) && !var_user_rl(expr->right)) {
		valid = rl_intersection(left_rl, valid_ptr_rl);
		if (valid && rl_equiv(valid, left_rl)) {
			*res = clone_rl(valid_ptr_rl);
			return true;
		}
	}

	if (sval_binop_overflows(rl_min(left_rl), expr->op, rl_min(right_rl)))
		return false;
	if (sval_binop_overflows(rl_max(left_rl), expr->op, rl_max(right_rl)))
		return false;

	min = sval_binop(rl_min(left_rl), expr->op, rl_min(right_rl));
	max = sval_binop(rl_max(left_rl), expr->op, rl_max(right_rl));

	*res = alloc_rl(min, max);
	return true;
}

static bool handle_subtract_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res)
{
	struct symbol *type;
	struct range_list *left_orig, *right_orig;
	struct range_list *left_rl, *right_rl;
	sval_t min, max, tmp;
	int comparison;
	int offset;

	type = get_type(expr);

	offset = handle_offset_subtraction(expr);
	if (offset >= 0) {
		tmp.type = type;
		tmp.value = offset;
		*res = alloc_rl(tmp, tmp);
		return true;
	}

	comparison = get_comparison(expr->left, expr->right);

	left_orig = NULL;
	get_rl_internal(expr->left, implied, recurse_cnt, &left_orig);
	left_rl = cast_rl(type, left_orig);
	right_orig = NULL;
	get_rl_internal(expr->right, implied, recurse_cnt, &right_orig);
	right_rl = cast_rl(type, right_orig);

	if ((!left_rl || !right_rl) &&
	    (implied == RL_EXACT || implied == RL_HARD || implied == RL_FUZZY))
		return false;

	if (!left_rl)
		left_rl = alloc_whole_rl(type);
	if (!right_rl)
		right_rl = alloc_whole_rl(type);

	/* negative values complicate everything fix this later */
	if (sval_is_negative(rl_min(right_rl)))
		return false;
	max = rl_max(left_rl);
	min = sval_type_min(type);

	switch (comparison) {
	case '>':
	case SPECIAL_UNSIGNED_GT:
		min = sval_type_val(type, 1);
		max = rl_max(left_rl);
		break;
	case SPECIAL_GTE:
	case SPECIAL_UNSIGNED_GTE:
		min = sval_type_val(type, 0);
		max = rl_max(left_rl);
		break;
	case SPECIAL_EQUAL:
		min = sval_type_val(type, 0);
		max = sval_type_val(type, 0);
		break;
	case '<':
	case SPECIAL_UNSIGNED_LT:
		max = sval_type_val(type, -1);
		break;
	case SPECIAL_LTE:
	case SPECIAL_UNSIGNED_LTE:
		max = sval_type_val(type, 0);
		break;
	default:
		if (!left_orig || !right_orig)
			return false;
		*res = rl_binop(left_rl, '-', right_rl);
		return true;
	}

	if (!max_is_unknown_max(right_rl) &&
	    !sval_binop_overflows(rl_min(left_rl), '-', rl_max(right_rl))) {
		tmp = sval_binop(rl_min(left_rl), '-', rl_max(right_rl));
		if (sval_cmp(tmp, min) > 0)
			min = tmp;
	}

	if (!sval_is_max(rl_max(left_rl))) {
		tmp = sval_binop(rl_max(left_rl), '-', rl_min(right_rl));
		if (sval_cmp(tmp, max) < 0)
			max = tmp;
	}

	if (sval_is_min(min) && sval_is_max(max))
		return false;

	*res = cast_rl(type, alloc_rl(min, max));
	return true;
}

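/*
 * When Smatch already knows how the two sides compare (via get_comparison()),
 * the subtraction can be bounded much more tightly than plain interval math
 * allows: for example, x - y where x > y is known to be at least 1.
 */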
static bool handle_mod_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res)
{
	struct range_list *rl;
	sval_t left, right, sval;

	if (implied == RL_EXACT) {
		if (!get_implied_value(expr->right, &right))
			return false;
		if (!get_implied_value(expr->left, &left))
			return false;
		sval = sval_binop(left, '%', right);
		*res = alloc_rl(sval, sval);
		return true;
	}
	/* if we can't figure out the right side it's probably hopeless */
	if (!get_implied_value_internal(expr->right, recurse_cnt, &right))
		return false;

	right = sval_cast(get_type(expr), right);
	right.value--;

	if (get_rl_internal(expr->left, implied, recurse_cnt, &rl) && rl &&
	    rl_max(rl).uvalue < right.uvalue)
		right.uvalue = rl_max(rl).uvalue;

	*res = alloc_rl(sval_cast(right.type, zero), right);
	return true;
}

static bool handle_bitwise_AND(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res)
{
	struct symbol *type;
	struct range_list *left_rl, *right_rl;
	int new_recurse;

	if (implied != RL_IMPLIED && implied != RL_ABSOLUTE && implied != RL_REAL_ABSOLUTE)
		return false;

	type = get_type(expr);

	if (!get_rl_internal(expr->left, implied, recurse_cnt, &left_rl))
		left_rl = alloc_whole_rl(type);
	left_rl = cast_rl(type, left_rl);

	new_recurse = *recurse_cnt;
	if (*recurse_cnt >= 200)
		new_recurse = 100;  /* Let's try super hard to get the mask */
	if (!get_rl_internal(expr->right, implied, &new_recurse, &right_rl))
		right_rl = alloc_whole_rl(type);
	right_rl = cast_rl(type, right_rl);
	*recurse_cnt = new_recurse;

	*res = rl_binop(left_rl, '&', right_rl);
	return true;
}

static bool use_rl_binop(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res)
{
	struct symbol *type;
	struct range_list *left_rl, *right_rl;

	if (implied != RL_IMPLIED && implied != RL_ABSOLUTE && implied != RL_REAL_ABSOLUTE)
		return false;

	type = get_type(expr);

	get_absolute_rl_internal(expr->left, &left_rl, recurse_cnt);
	get_absolute_rl_internal(expr->right, &right_rl, recurse_cnt);
	left_rl = cast_rl(type, left_rl);
	right_rl = cast_rl(type, right_rl);
	if (!left_rl || !right_rl)
		return false;

	*res = rl_binop(left_rl, expr->op, right_rl);
	return true;
}

static bool handle_right_shift(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res)
{
	struct range_list *left_rl, *right_rl;
	sval_t min, max;

	if (implied == RL_EXACT || implied == RL_HARD)
		return false;

	if (get_rl_internal(expr->left, implied, recurse_cnt, &left_rl)) {
		max = rl_max(left_rl);
		min = rl_min(left_rl);
	} else {
		if (implied == RL_FUZZY)
			return false;
		max = sval_type_max(get_type(expr->left));
		min = sval_type_val(get_type(expr->left), 0);
	}

	if (get_rl_internal(expr->right, implied, recurse_cnt, &right_rl) &&
	    !sval_is_negative(rl_min(right_rl))) {
		min = sval_binop(min, SPECIAL_RIGHTSHIFT, rl_max(right_rl));
		max = sval_binop(max, SPECIAL_RIGHTSHIFT, rl_min(right_rl));
	} else if (!sval_is_negative(min)) {
		min.value = 0;
		max = sval_type_max(max.type);
	} else {
		return false;
	}

	*res = alloc_rl(min, max);
	return true;
}

static bool handle_left_shift(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res)
{
	struct range_list *left_rl, *rl;
	sval_t right;

	if (implied == RL_EXACT || implied == RL_HARD)
		return false;
	/* this is hopeless without the right side */
	if (!get_implied_value_internal(expr->right, recurse_cnt, &right))
		return false;
	if (!get_rl_internal(expr->left, implied, recurse_cnt, &left_rl)) {
		if (implied == RL_FUZZY)
			return false;
		left_rl = alloc_whole_rl(get_type(expr->left));
	}

	rl = rl_binop(left_rl, SPECIAL_LEFTSHIFT, alloc_rl(right, right));
	if (!rl)
		return false;
	*res = rl;
	return true;
}

static bool handle_known_binop(struct expression *expr, sval_t *res)
{
	sval_t left, right;

	if (!get_value(expr->left, &left))
		return false;
	if (!get_value(expr->right, &right))
		return false;
	*res = sval_binop(left, expr->op, right);
	return true;
}

static int has_actual_ranges(struct range_list *rl)
{
	struct data_range *tmp;

	FOR_EACH_PTR(rl, tmp) {
		if (sval_cmp(tmp->min, tmp->max) != 0)
			return 1;
	} END_FOR_EACH_PTR(tmp);
	return 0;
}

static struct range_list *handle_implied_binop(struct range_list *left_rl, int op, struct range_list *right_rl)
{
	struct range_list *res_rl;
	struct data_range *left_drange, *right_drange;
	sval_t res;

	if (!left_rl || !right_rl)
		return NULL;
	if (has_actual_ranges(left_rl))
		return NULL;
	if (has_actual_ranges(right_rl))
		return NULL;

	if (ptr_list_size((struct ptr_list *)left_rl) * ptr_list_size((struct ptr_list *)right_rl) > 20)
		return NULL;

	res_rl = NULL;

	FOR_EACH_PTR(left_rl, left_drange) {
		FOR_EACH_PTR(right_rl, right_drange) {
			if ((op == '%' || op == '/') &&
			    right_drange->min.value == 0)
				return NULL;
			res = sval_binop(left_drange->min, op, right_drange->min);
			add_range(&res_rl, res, res);
		} END_FOR_EACH_PTR(right_drange);
	} END_FOR_EACH_PTR(left_drange);

	return res_rl;
}

static bool handle_binop_rl_helper(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct symbol *type;
	struct range_list *left_rl = NULL;
	struct range_list *right_rl = NULL;
	struct range_list *rl;
	sval_t min, max;

	type = get_promoted_type(get_type(expr->left), get_type(expr->right));
	get_rl_internal(expr->left, implied, recurse_cnt, &left_rl);
	left_rl = cast_rl(type, left_rl);
	get_rl_internal(expr->right, implied, recurse_cnt, &right_rl);
	right_rl = cast_rl(type, right_rl);

	rl = handle_implied_binop(left_rl, expr->op, right_rl);
	if (rl) {
		*res = rl;
		return true;
	}

	switch (expr->op) {
	case '%':
		return handle_mod_rl(expr, implied, recurse_cnt, res);
	case '&':
		return handle_bitwise_AND(expr, implied, recurse_cnt, res);
	case '|':
	case '^':
		return use_rl_binop(expr, implied, recurse_cnt, res);
	case SPECIAL_RIGHTSHIFT:
		return handle_right_shift(expr, implied, recurse_cnt, res);
	case SPECIAL_LEFTSHIFT:
		return handle_left_shift(expr, implied, recurse_cnt, res);
	case '+':
		return handle_add_rl(expr, implied, recurse_cnt, res);
	case '-':
		return handle_subtract_rl(expr, implied, recurse_cnt, res);
	case '/':
		return handle_divide_rl(expr, implied, recurse_cnt, res);
	}

	if (!left_rl || !right_rl)
		return false;

	if (sval_binop_overflows(rl_min(left_rl), expr->op, rl_min(right_rl)))
		return false;
	if (sval_binop_overflows(rl_max(left_rl), expr->op, rl_max(right_rl)))
		return false;

	min = sval_binop(rl_min(left_rl), expr->op, rl_min(right_rl));
	max = sval_binop(rl_max(left_rl), expr->op, rl_max(right_rl));

	*res = alloc_rl(min, max);
	return true;
}

static bool handle_binop_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct smatch_state *state;
	struct range_list *rl;
	sval_t val;

	if (expr->sval) {
		*res_sval = *expr->sval;
		return true;
	}

	if (handle_known_binop(expr, &val)) {
		expr->sval = malloc(sizeof(sval_t));
		*expr->sval = val;
		*res_sval = val;
		return true;
	}
	if (implied == RL_EXACT)
		return false;

	if (custom_handle_variable) {
		rl = custom_handle_variable(expr);
		if (rl) {
			*res = rl;
			return true;
		}
	}

	state = get_extra_state(expr);
	if (state && !is_whole_rl(estate_rl(state))) {
		if (implied != RL_HARD || estate_has_hard_max(state)) {
			*res = clone_rl(estate_rl(state));
			return true;
		}
	}

	return handle_binop_rl_helper(expr, implied, recurse_cnt, res, res_sval);
}

static int do_comparison(struct expression *expr)
{
	struct range_list *left_ranges = NULL;
	struct range_list *right_ranges = NULL;
	int poss_true, poss_false;
	struct symbol *type;

	type = get_type(expr);
	get_absolute_rl(expr->left, &left_ranges);
	get_absolute_rl(expr->right, &right_ranges);

	left_ranges = cast_rl(type, left_ranges);
	right_ranges = cast_rl(type, right_ranges);

	poss_true = possibly_true_rl(left_ranges, expr->op, right_ranges);
	poss_false = possibly_false_rl(left_ranges, expr->op, right_ranges);

	if (!poss_true && !poss_false)
		return 0;
	if (poss_true && !poss_false)
		return 1;
	if (!poss_true && poss_false)
		return 2;
	return 3;
}

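/*
 * do_comparison() encodes its answer in two bits: 1 means the comparison must
 * be true, 2 means it must be false, 3 means either outcome is possible and
 * 0 means neither outcome is possible (empty ranges).
 */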
static bool handle_comparison_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	sval_t left, right;
	int cmp;

	if (expr->op == SPECIAL_EQUAL && expr->left->type == EXPR_TYPE) {
		struct symbol *left, *right;

		if (expr->right->type != EXPR_TYPE)
			return false;

		left = get_real_base_type(expr->left->symbol);
		right = get_real_base_type(expr->right->symbol);
		if (type_bits(left) == type_bits(right) &&
		    type_positive_bits(left) == type_positive_bits(right))
			*res_sval = one;
		else
			*res_sval = zero;
		return true;
	}

	if (get_value(expr->left, &left) && get_value(expr->right, &right)) {
		struct data_range tmp_left, tmp_right;

		tmp_left.min = left;
		tmp_left.max = left;
		tmp_right.min = right;
		tmp_right.max = right;
		if (true_comparison_range(&tmp_left, expr->op, &tmp_right))
			*res_sval = one;
		else
			*res_sval = zero;
		return true;
	}

	if (implied == RL_EXACT)
		return false;

	cmp = do_comparison(expr);
	if (cmp == 1) {
		*res_sval = one;
		return true;
	}
	if (cmp == 2) {
		*res_sval = zero;
		return true;
	}

	*res = alloc_rl(zero, one);
	return true;
}

static bool handle_logical_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	sval_t left, right;
	int left_known = 0;
	int right_known = 0;

	if (implied == RL_EXACT) {
		if (get_value(expr->left, &left))
			left_known = 1;
		if (get_value(expr->right, &right))
			right_known = 1;
	} else {
		if (get_implied_value_internal(expr->left, recurse_cnt, &left))
			left_known = 1;
		if (get_implied_value_internal(expr->right, recurse_cnt, &right))
			right_known = 1;
	}

	switch (expr->op) {
	case SPECIAL_LOGICAL_OR:
		if (left_known && left.value)
			goto one;
		if (right_known && right.value)
			goto one;
		if (left_known && right_known)
			goto zero;
		break;
	case SPECIAL_LOGICAL_AND:
		if (left_known && right_known) {
			if (left.value && right.value)
				goto one;
			goto zero;
		}
		break;
	default:
		return false;
	}

	if (implied == RL_EXACT)
		return false;

	*res = alloc_rl(zero, one);
	return true;

zero:
	*res_sval = zero;
	return true;
one:
	*res_sval = one;
	return true;
}

static bool handle_conditional_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct expression *cond_true;
	struct range_list *true_rl, *false_rl;
	struct symbol *type;
	int final_pass_orig = final_pass;

	cond_true = expr->cond_true;
	if (!cond_true)
		cond_true = expr->conditional;

	if (known_condition_true(expr->conditional))
		return get_rl_sval(cond_true, implied, recurse_cnt, res, res_sval);
	if (known_condition_false(expr->conditional))
		return get_rl_sval(expr->cond_false, implied, recurse_cnt, res, res_sval);

	if (implied == RL_EXACT)
		return false;

	if (implied_condition_true(expr->conditional))
		return get_rl_sval(cond_true, implied, recurse_cnt, res, res_sval);
	if (implied_condition_false(expr->conditional))
		return get_rl_sval(expr->cond_false, implied, recurse_cnt, res, res_sval);

	/* this becomes a problem with deeply nested conditional statements */
	if (fast_math_only || low_on_memory())
		return false;

	type = get_type(expr);

	__push_fake_cur_stree();
	final_pass = 0;
	__split_whole_condition(expr->conditional);
	true_rl = NULL;
	get_rl_internal(cond_true, implied, recurse_cnt, &true_rl);
	__push_true_states();
	__use_false_states();
	false_rl = NULL;
	get_rl_internal(expr->cond_false, implied, recurse_cnt, &false_rl);
	__merge_true_states();
	__free_fake_cur_stree();
	final_pass = final_pass_orig;

	if (!true_rl || !false_rl)
		return false;
	true_rl = cast_rl(type, true_rl);
	false_rl = cast_rl(type, false_rl);

	*res = rl_union(true_rl, false_rl);
	return true;
}

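/*
 * The fuzzy helpers below provide "probably" bounds: get_fuzzy_max_helper()
 * prefers a hard maximum but will settle for the fuzzy max recorded in the
 * extra state, and get_fuzzy_min_helper() rejects values that are just the
 * type minimum or maximum because those usually mean "unknown".
 */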
static bool get_fuzzy_max_helper(struct expression *expr, sval_t *max)
{
	struct smatch_state *state;
	sval_t sval;

	if (get_hard_max(expr, &sval)) {
		*max = sval;
		return true;
	}

	state = get_extra_state(expr);
	if (!state || !estate_has_fuzzy_max(state))
		return false;
	*max = sval_cast(get_type(expr), estate_get_fuzzy_max(state));
	return true;
}

static bool get_fuzzy_min_helper(struct expression *expr, sval_t *min)
{
	struct smatch_state *state;
	sval_t sval;

	state = get_extra_state(expr);
	if (!state || !estate_rl(state))
		return false;

	sval = estate_min(state);
	if (sval_is_negative(sval) && sval_is_min(sval))
		return false;

	if (sval_is_max(sval))
		return false;

	*min = sval_cast(get_type(expr), sval);
	return true;
}

int get_const_value(struct expression *expr, sval_t *sval)
{
	struct symbol *sym;
	sval_t right;

	if (expr->type != EXPR_SYMBOL || !expr->symbol)
		return 0;
	sym = expr->symbol;
	if (!(sym->ctype.modifiers & MOD_CONST))
		return 0;
	if (get_value(sym->initializer, &right)) {
		*sval = sval_cast(get_type(expr), right);
		return 1;
	}
	return 0;
}

struct range_list *var_to_absolute_rl(struct expression *expr)
{
	struct smatch_state *state;
	struct range_list *rl;

	state = get_extra_state(expr);
	if (!state || is_whole_rl(estate_rl(state))) {
		state = get_real_absolute_state(expr);
		if (state && state->data && !estate_is_whole(state))
			return clone_rl(estate_rl(state));
		if (get_mtag_rl(expr, &rl))
			return rl;
		if (get_db_type_rl(expr, &rl) && !is_whole_rl(rl))
			return rl;
		return alloc_whole_rl(get_type(expr));
	}
	/* err on the side of saying things are possible */
	if (!estate_rl(state))
		return alloc_whole_rl(get_type(expr));
	return clone_rl(estate_rl(state));
}

static bool is_param_sym(struct expression *expr)
{
	if (expr->type != EXPR_SYMBOL)
		return false;
	if (get_param_num(expr) < 0)
		return false;
	return true;
}

static bool handle_variable(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct smatch_state *state;
	struct range_list *rl;
	sval_t sval, min, max;
	struct symbol *type;

	if (get_const_value(expr, &sval)) {
		*res_sval = sval;
		return true;
	}

	if (implied == RL_EXACT)
		return false;

	if (custom_handle_variable) {
		rl = custom_handle_variable(expr);
		if (rl) {
			if (!rl_to_sval(rl, res_sval))
				*res = rl;
		} else {
			*res = var_to_absolute_rl(expr);
		}
		return true;
	}

	if (get_mtag_sval(expr, &sval)) {
		*res_sval = sval;
		return true;
	}

	type = get_type(expr);
	if (type &&
	    ((type->type == SYM_ARRAY && !is_param_sym(expr)) ||
	     type->type == SYM_FN))
		return handle_address(expr, implied, recurse_cnt, res, res_sval);

	/* FIXME: call rl_to_sval() on the results */

	switch (implied) {
	case RL_HARD:
	case RL_IMPLIED:
	case RL_ABSOLUTE:
		state = get_extra_state(expr);
		if (!state) {
			if (implied == RL_HARD)
				return false;
			if (get_mtag_rl(expr, res))
				return true;
			if (is_array(expr) && get_array_rl(expr, res))
				return true;
			if (implied == RL_IMPLIED)
				return false;
			if (get_db_type_rl(expr, res))
				return true;
			return false;
		}
		if (implied == RL_HARD && !estate_has_hard_max(state))
			return false;
		*res = clone_rl(estate_rl(state));
		return true;
	case RL_REAL_ABSOLUTE: {
		struct smatch_state *abs_state;

		state = get_extra_state(expr);
		abs_state = get_real_absolute_state(expr);

		if (estate_rl(state) && estate_rl(abs_state)) {
			*res = clone_rl(rl_intersection(estate_rl(state),
							estate_rl(abs_state)));
			return true;
		} else if (estate_rl(state)) {
			*res = clone_rl(estate_rl(state));
			return true;
		} else if (estate_is_empty(state)) {
			/*
			 * FIXME: we don't handle empty extra states correctly.
			 *
			 * The real abs rl is supposed to be filtered by the
			 * extra state if there is one.  We don't bother keeping
			 * the abs state in sync all the time because we know it
			 * will be filtered later.
			 *
			 * It's not totally obvious to me how they should be
			 * handled.  Perhaps we should take the whole rl and
			 * filter by the imaginary states.  Perhaps we should
			 * just go with the empty state.
			 *
			 * Anyway what we currently do is return NULL here and
			 * that gets translated into the whole range in
			 * get_real_absolute_rl().
			 */
			return false;
		} else if (estate_rl(abs_state)) {
			*res = clone_rl(estate_rl(abs_state));
			return true;
		}

		if (get_mtag_rl(expr, res))
			return true;
		if (get_db_type_rl(expr, res))
			return true;
		if (is_array(expr) && get_array_rl(expr, res))
			return true;
		return false;
	}
	case RL_FUZZY:
		if (!get_fuzzy_min_helper(expr, &min))
			min = sval_type_min(get_type(expr));
		if (!get_fuzzy_max_helper(expr, &max))
			return false;
		/* fuzzy ranges are often inverted */
		if (sval_cmp(min, max) > 0) {
			sval = min;
			min = max;
			max = sval;
		}
		*res = alloc_rl(min, max);
		return true;
	}
	return false;
}

static sval_t handle_sizeof(struct expression *expr)
{
	struct symbol *sym;
	sval_t ret;

	ret = sval_blank(expr);
	sym = expr->cast_type;
	if (!sym) {
		sym = evaluate_expression(expr->cast_expression);
		if (!sym) {
			__silence_warnings_for_stmt = true;
			sym = &int_ctype;
		}
	}

	/*
	 * Expressions of restricted types will possibly get
	 * promoted - check that here.  I'm not sure how this works,
	 * the problem is that sizeof(le16) shouldn't be promoted and
	 * the original code did that...  Let's just zero this out and
	 * see what breaks.
	 */
	if (is_restricted_type(sym)) {
		if (type_bits(sym) < bits_in_int)
			sym = &int_ctype;
	}

	if (is_fouled_type(sym))
		sym = &int_ctype;

	examine_symbol_type(sym);

	ret.type = size_t_ctype;
	if (type_bits(sym) <= 0) /* sizeof(void) */ {
		if (get_real_base_type(sym) == &void_ctype)
			ret.value = 1;
		else
			ret.value = 0;
	} else
		ret.value = type_bytes(sym);

	return ret;
}

static bool handle_strlen(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct expression *arg, *tmp;
	sval_t tag;
	sval_t ret = { .type = &ulong_ctype };
	struct range_list *rl;

	arg = get_argument_from_call_expr(expr->args, 0);
	if (!arg)
		return false;
	if (arg->type == EXPR_STRING) {
		ret.value = arg->string->length - 1;
		*res_sval = ret;
		return true;
	}
	if (implied == RL_EXACT)
		return false;
	if (get_implied_value(arg, &tag) &&
	    (tmp = fake_string_from_mtag(tag.uvalue))) {
		ret.value = tmp->string->length - 1;
		*res_sval = ret;
		return true;
	}

	if (implied == RL_HARD || implied == RL_FUZZY)
		return false;

	if (get_implied_return(expr, &rl)) {
		*res = rl;
		return true;
	}

	return false;
}

static bool handle_builtin_constant_p(struct expression *expr, int implied, int *recurse_cnt, sval_t *res_sval)
{
	struct expression *arg;
	struct range_list *rl;

	arg = get_argument_from_call_expr(expr->args, 0);
	if (get_rl_internal(arg, RL_EXACT, recurse_cnt, &rl))
		*res_sval = one;
	else
		*res_sval = zero;
	return true;
}

static bool handle__builtin_choose_expr(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct expression *const_expr, *expr1, *expr2;
	sval_t sval;

	const_expr = get_argument_from_call_expr(expr->args, 0);
	expr1 = get_argument_from_call_expr(expr->args, 1);
	expr2 = get_argument_from_call_expr(expr->args, 2);

	if (!get_value(const_expr, &sval) || !expr1 || !expr2)
		return false;
	if (sval.value)
		return get_rl_sval(expr1, implied, recurse_cnt, res, res_sval);
	else
		return get_rl_sval(expr2, implied, recurse_cnt, res, res_sval);
}

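/*
 * __builtin_ffs() returns the 1-based position of the lowest set bit (or 0
 * when no bit is set).  The next two functions bound that result using the
 * tracked bit information: the lowest possibly-set bit gives the minimum and
 * the lowest definitely-set bit (or, failing that, the highest possibly-set
 * bit) gives the maximum.
 */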
int smatch_fls(unsigned long long value)
{
	int i;

	for (i = 63; i >= 0; i--) {
		if (value & 1ULL << i)
			return i + 1;
	}
	return 0;
}

static bool handle_ffs(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct expression *arg;
	struct bit_info *bits;
	sval_t high = { .type = &int_ctype };
	sval_t low = { .type = &int_ctype };

	arg = get_argument_from_call_expr(expr->args, 0);

	bits = get_bit_info(arg);
	if (bits->possible == 0) {
		*res_sval = zero;
		return true;
	}
	if (bits->set)
		high.value = ffsll(bits->set);
	else
		high.value = smatch_fls(bits->possible);

	low.value = ffsll(bits->possible);

	*res = alloc_rl(low, high);
	return true;
}

static bool handle_call_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct range_list *rl;

	if (sym_name_is("__builtin_constant_p", expr->fn))
		return handle_builtin_constant_p(expr, implied, recurse_cnt, res_sval);

	if (sym_name_is("__builtin_choose_expr", expr->fn))
		return handle__builtin_choose_expr(expr, implied, recurse_cnt, res, res_sval);

	if (sym_name_is("__builtin_expect", expr->fn) ||
	    sym_name_is("__builtin_bswap16", expr->fn) ||
	    sym_name_is("__builtin_bswap32", expr->fn) ||
	    sym_name_is("__builtin_bswap64", expr->fn)) {
		struct expression *arg;

		arg = get_argument_from_call_expr(expr->args, 0);
		return get_rl_sval(arg, implied, recurse_cnt, res, res_sval);
	}

	if (sym_name_is("__builtin_ffs", expr->fn) ||
	    sym_name_is("__builtin_ffsl", expr->fn) ||
	    sym_name_is("__builtin_ffsll", expr->fn) ||
	    sym_name_is("__ffs", expr->fn))
		return handle_ffs(expr, implied, recurse_cnt, res, res_sval);

	if (sym_name_is("strlen", expr->fn))
		return handle_strlen(expr, implied, recurse_cnt, res, res_sval);

	if (implied == RL_EXACT || implied == RL_HARD)
		return false;

	if (custom_handle_variable) {
		rl = custom_handle_variable(expr);
		if (rl) {
			*res = rl;
			return true;
		}
	}

	/* Ugh...  get_implied_return() sets *rl to NULL on failure */
	if (get_implied_return(expr, &rl)) {
		*res = rl;
		return true;
	}
	rl = db_return_vals(expr);
	if (rl) {
		*res = rl;
		return true;
	}
	return false;
}

static bool handle_cast(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct range_list *rl;
	struct symbol *type;
	sval_t sval = {};

	type = get_type(expr);
	if (get_rl_sval(expr->cast_expression, implied, recurse_cnt, &rl, &sval)) {
		if (sval.type)
			*res_sval = sval_cast(type, sval);
		else
			*res = cast_rl(type, rl);
		return true;
	}
	if (implied == RL_ABSOLUTE || implied == RL_REAL_ABSOLUTE) {
		*res = alloc_whole_rl(type);
		return true;
	}
	if (implied == RL_IMPLIED && type &&
	    type_bits(type) > 0 && type_bits(type) < 32) {
		*res = alloc_whole_rl(type);
		return true;
	}
	return false;
}

static bool get_offset_from_down(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct expression *index;
	struct symbol *type = expr->in;
	struct range_list *rl;
	struct symbol *field;
	int offset = 0;
	sval_t sval = { .type = ssize_t_ctype };
	sval_t tmp_sval = {};

	/*
	 * FIXME:  I don't really know what I'm doing here.  I wish that I
	 * could just get rid of the __builtin_offset() function and use:
	 * "&((struct bpf_prog *)NULL)->insns[fprog->len]" instead...
	 * Anyway, I have done the minimum amount of work to get that
	 * expression to work.
	 */

	if (expr->op != '.' || !expr->down ||
	    expr->down->type != EXPR_OFFSETOF ||
	    expr->down->op != '[' ||
	    !expr->down->index)
		return false;

	index = expr->down->index;

	examine_symbol_type(type);
	type = get_real_base_type(type);
	if (!type)
		return false;
	field = find_identifier(expr->ident, type->symbol_list, &offset);
	if (!field)
		return false;

	type = get_real_base_type(field);
	if (!type || type->type != SYM_ARRAY)
		return false;
	type = get_real_base_type(type);

	if (get_implied_value_internal(index, recurse_cnt, &sval)) {
		res_sval->type = ssize_t_ctype;
		res_sval->value = offset + sval.value * type_bytes(type);
		return true;
	}

	if (!get_rl_sval(index, implied, recurse_cnt, &rl, &tmp_sval))
		return false;

	/*
	 * I'm not sure why get_rl_sval() would return an sval when
	 * get_implied_value_internal() failed but it does when I
	 * parse drivers/net/ethernet/mellanox/mlx5/core/en/monitor_stats.c
	 */
	if (tmp_sval.type) {
		res_sval->type = ssize_t_ctype;
		res_sval->value = offset + sval.value * type_bytes(type);
		return true;
	}

	sval.value = type_bytes(type);
	rl = rl_binop(rl, '*', alloc_rl(sval, sval));
	sval.value = offset;
	*res = rl_binop(rl, '+', alloc_rl(sval, sval));
	return true;
}

static bool get_offset_from_in(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	struct symbol *type = get_real_base_type(expr->in);
	struct symbol *field;
	int offset = 0;

	if (expr->op != '.' || !type || !expr->ident)
		return false;

	field = find_identifier(expr->ident, type->symbol_list, &offset);
	if (!field)
		return false;

	res_sval->type = size_t_ctype;
	res_sval->value = offset;
	return true;
}

static bool handle_offsetof_rl(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *res_sval)
{
	if (get_offset_from_down(expr, implied, recurse_cnt, res, res_sval))
		return true;
	if (get_offset_from_in(expr, implied, recurse_cnt, res, res_sval))
		return true;

	evaluate_expression(expr);
	if (expr->type == EXPR_VALUE) {
		*res_sval = sval_from_val(expr, expr->value);
		return true;
	}
	return false;
}

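/*
 * get_rl_sval() is the central dispatcher: it strips parens and casts,
 * switches on the expression type and delegates to the handle_*() helpers
 * above.  "rl" starts out as the (void *)-1UL marker so the code can tell
 * "no handler produced anything" apart from a genuine NULL range list.
 */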
static bool get_rl_sval(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res, sval_t *sval_res)
{
	struct range_list *rl = (void *)-1UL;
	struct symbol *type;
	sval_t sval = {};

	type = get_type(expr);
	expr = strip_parens(expr);
	if (!expr)
		return false;

	if (++(*recurse_cnt) >= 200)
		return false;

	switch(expr->type) {
	case EXPR_CAST:
	case EXPR_FORCE_CAST:
	case EXPR_IMPLIED_CAST:
		handle_cast(expr, implied, recurse_cnt, &rl, &sval);
		goto out_cast;
	}

	expr = strip_expr(expr);
	if (!expr)
		return false;

	switch (expr->type) {
	case EXPR_VALUE:
		sval = sval_from_val(expr, expr->value);
		break;
	case EXPR_FVALUE:
		sval = sval_from_fval(expr, expr->fvalue);
		break;
	case EXPR_PREOP:
		handle_preop_rl(expr, implied, recurse_cnt, &rl, &sval);
		break;
	case EXPR_POSTOP:
		get_rl_sval(expr->unop, implied, recurse_cnt, &rl, &sval);
		break;
	case EXPR_BINOP:
		handle_binop_rl(expr, implied, recurse_cnt, &rl, &sval);
		break;
	case EXPR_COMPARE:
		handle_comparison_rl(expr, implied, recurse_cnt, &rl, &sval);
		break;
	case EXPR_LOGICAL:
		handle_logical_rl(expr, implied, recurse_cnt, &rl, &sval);
		break;
	case EXPR_PTRSIZEOF:
	case EXPR_SIZEOF:
		sval = handle_sizeof(expr);
		break;
	case EXPR_SELECT:
	case EXPR_CONDITIONAL:
		handle_conditional_rl(expr, implied, recurse_cnt, &rl, &sval);
		break;
	case EXPR_CALL:
		handle_call_rl(expr, implied, recurse_cnt, &rl, &sval);
		break;
	case EXPR_STRING:
		if (get_mtag_sval(expr, &sval))
			break;
		if (implied == RL_EXACT)
			break;
		rl = alloc_rl(valid_ptr_min_sval, valid_ptr_max_sval);
		break;
	case EXPR_OFFSETOF:
		handle_offsetof_rl(expr, implied, recurse_cnt, &rl, &sval);
		break;
	case EXPR_ALIGNOF:
		evaluate_expression(expr);
		if (expr->type == EXPR_VALUE)
			sval = sval_from_val(expr, expr->value);
		break;
	default:
		handle_variable(expr, implied, recurse_cnt, &rl, &sval);
	}

out_cast:
	if (rl == (void *)-1UL)
		rl = NULL;

	if (sval.type || (rl && rl_to_sval(rl, &sval))) {
		*sval_res = sval;
		return true;
	}
	if (implied == RL_EXACT)
		return false;
	if (rl) {
		*res = rl;
		return true;
	}
	if (type && (implied == RL_ABSOLUTE || implied == RL_REAL_ABSOLUTE)) {
		*res = alloc_whole_rl(type);
		return true;
	}
	return false;
}

static bool get_rl_internal(struct expression *expr, int implied, int *recurse_cnt, struct range_list **res)
{
	struct range_list *rl = NULL;
	sval_t sval = {};

	if (!get_rl_sval(expr, implied, recurse_cnt, &rl, &sval))
		return false;

	if (sval.type)
		*res = alloc_rl(sval, sval);
	else
		*res = rl;
	return true;
}

static bool get_rl_helper(struct expression *expr, int implied, struct range_list **res)
{
	struct range_list *rl = NULL;
	sval_t sval = {};
	int recurse_cnt = 0;

	if (get_value(expr, &sval)) {
		if (implied == RL_HARD) {
			if (sval.uvalue == INT_MAX ||
			    sval.uvalue == UINT_MAX ||
			    sval.uvalue == LONG_MAX ||
			    sval.uvalue == ULONG_MAX)
				return false;
		}
		*res = alloc_rl(sval, sval);
		return true;
	}

	if (!get_rl_sval(expr, implied, &recurse_cnt, &rl, &sval))
		return false;

	if (sval.type)
		*res = alloc_rl(sval, sval);
	else
		*res = rl;
	return true;
}

static struct {
	struct expression *expr;
	sval_t sval;
} cached_results[24];
static int cache_idx;

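/*
 * A tiny round-robin cache of get_value() results, keyed by expression
 * pointer.  Only RL_EXACT results are cached because they cannot change
 * between points in the code; clear_math_cache() wipes it.
 */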
void clear_math_cache(void)
{
	memset(cached_results, 0, sizeof(cached_results));
}

void set_fast_math_only(void)
{
	fast_math_only++;
}

void clear_fast_math_only(void)
{
	fast_math_only--;
}

/*
 * Don't cache EXPR_VALUE because values are fast already.
 */
static bool get_value_literal(struct expression *expr, sval_t *res_sval)
{
	struct expression *tmp;
	int recurse_cnt = 0;

	tmp = strip_expr(expr);
	if (!tmp || tmp->type != EXPR_VALUE)
		return false;

	return get_rl_sval(expr, RL_EXACT, &recurse_cnt, NULL, res_sval);
}

/* returns 1 if it can get a value literal or else returns 0 */
int get_value(struct expression *expr, sval_t *res_sval)
{
	struct range_list *(*orig_custom_fn)(struct expression *expr);
	int recurse_cnt = 0;
	sval_t sval = {};
	int i;

	if (get_value_literal(expr, res_sval))
		return 1;

	/*
	 * This only handles RL_EXACT because other expr statements can be
	 * different at different points.  Like the list iterator, for example.
	 */
	for (i = 0; i < ARRAY_SIZE(cached_results); i++) {
		if (expr == cached_results[i].expr) {
			if (cached_results[i].sval.type) {
				*res_sval = cached_results[i].sval;
				return 1;
			}
			return 0;
		}
	}

	orig_custom_fn = custom_handle_variable;
	custom_handle_variable = NULL;
	get_rl_sval(expr, RL_EXACT, &recurse_cnt, NULL, &sval);

	custom_handle_variable = orig_custom_fn;

	cached_results[cache_idx].expr = expr;
	cached_results[cache_idx].sval = sval;
	cache_idx = (cache_idx + 1) % ARRAY_SIZE(cached_results);

	if (!sval.type)
		return 0;

	*res_sval = sval;
	return 1;
}

static bool get_implied_value_internal(struct expression *expr, int *recurse_cnt, sval_t *res_sval)
{
	struct range_list *rl;

	res_sval->type = NULL;

	if (!get_rl_sval(expr, RL_IMPLIED, recurse_cnt, &rl, res_sval))
		return false;
	if (!res_sval->type && !rl_to_sval(rl, res_sval))
		return false;
	return true;
}

int get_implied_value(struct expression *expr, sval_t *sval)
{
	struct range_list *rl;

	if (!get_rl_helper(expr, RL_IMPLIED, &rl) ||
	    !rl_to_sval(rl, sval))
		return 0;
	return 1;
}

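/*
 * get_implied_value_fast() is the cheap variant: it sets fast_math_only so
 * that expensive paths (such as splitting a ?: condition in
 * handle_conditional_rl()) are skipped, then restores normal behaviour.
 */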
int get_implied_value_fast(struct expression *expr, sval_t *sval)
{
	struct range_list *rl;
	int ret = 0;

	set_fast_math_only();
	if (get_rl_helper(expr, RL_IMPLIED, &rl) &&
	    rl_to_sval(rl, sval))
		ret = 1;
	clear_fast_math_only();

	return ret;
}

int get_implied_min(struct expression *expr, sval_t *sval)
{
	struct range_list *rl;

	if (!get_rl_helper(expr, RL_IMPLIED, &rl) || !rl)
		return 0;
	*sval = rl_min(rl);
	return 1;
}

int get_implied_max(struct expression *expr, sval_t *sval)
{
	struct range_list *rl;

	if (!get_rl_helper(expr, RL_IMPLIED, &rl) || !rl)
		return 0;
	*sval = rl_max(rl);
	return 1;
}

int get_implied_rl(struct expression *expr, struct range_list **rl)
{
	if (!get_rl_helper(expr, RL_IMPLIED, rl) || !*rl)
		return 0;
	return 1;
}

static int get_absolute_rl_internal(struct expression *expr, struct range_list **rl, int *recurse_cnt)
{
	*rl = NULL;
	get_rl_internal(expr, RL_ABSOLUTE, recurse_cnt, rl);
	if (!*rl)
		*rl = alloc_whole_rl(get_type(expr));
	return 1;
}

int get_absolute_rl(struct expression *expr, struct range_list **rl)
{
	*rl = NULL;
	get_rl_helper(expr, RL_ABSOLUTE, rl);
	if (!*rl)
		*rl = alloc_whole_rl(get_type(expr));
	return 1;
}

int get_real_absolute_rl(struct expression *expr, struct range_list **rl)
{
	*rl = NULL;
	get_rl_helper(expr, RL_REAL_ABSOLUTE, rl);
	if (!*rl)
		*rl = alloc_whole_rl(get_type(expr));
	return 1;
}

int custom_get_absolute_rl(struct expression *expr,
			   struct range_list *(*fn)(struct expression *expr),
			   struct range_list **rl)
{
	int ret;

	*rl = NULL;
	custom_handle_variable = fn;
	ret = get_rl_helper(expr, RL_REAL_ABSOLUTE, rl);
	custom_handle_variable = NULL;
	return ret;
}

int get_implied_rl_var_sym(const char *var, struct symbol *sym, struct range_list **rl)
{
	struct smatch_state *state;

	state = get_state(SMATCH_EXTRA, var, sym);
	*rl = estate_rl(state);
	if (*rl)
		return 1;
	return 0;
}

int get_hard_max(struct expression *expr, sval_t *sval)
{
	struct range_list *rl;

	if (!get_rl_helper(expr, RL_HARD, &rl) || !rl)
		return 0;
	*sval = rl_max(rl);
	return 1;
}

int get_fuzzy_min(struct expression *expr, sval_t *sval)
{
	struct range_list *rl;
	sval_t tmp;

	if (!get_rl_helper(expr, RL_FUZZY, &rl) || !rl)
		return 0;
	tmp = rl_min(rl);
	if (sval_is_negative(tmp) && sval_is_min(tmp))
		return 0;
	*sval = tmp;
	return 1;
}

int get_fuzzy_max(struct expression *expr, sval_t *sval)
{
	struct range_list *rl;
	sval_t max;

	if (!get_rl_helper(expr, RL_FUZZY, &rl) || !rl)
		return 0;
	max = rl_max(rl);
	if (max.uvalue > INT_MAX - 10000)
		return 0;
	*sval = max;
	return 1;
}

int get_absolute_min(struct expression *expr, sval_t *sval)
{
	struct range_list *rl;
	struct symbol *type;

	type = get_type(expr);
	if (!type)
		type = &llong_ctype;  // FIXME: this is wrong but places assume get type can't fail.
	rl = NULL;
	get_rl_helper(expr, RL_REAL_ABSOLUTE, &rl);
	if (rl)
		*sval = rl_min(rl);
	else
		*sval = sval_type_min(type);

	if (sval_cmp(*sval, sval_type_min(type)) < 0)
		*sval = sval_type_min(type);
	return 1;
}

int get_absolute_max(struct expression *expr, sval_t *sval)
{
	struct range_list *rl;
	struct symbol *type;

	type = get_type(expr);
	if (!type)
		type = &llong_ctype;
	rl = NULL;
	get_rl_helper(expr, RL_REAL_ABSOLUTE, &rl);
	if (rl)
		*sval = rl_max(rl);
	else
		*sval = sval_type_max(type);

	if (sval_cmp(sval_type_max(type), *sval) < 0)
		*sval = sval_type_max(type);
	return 1;
}

int known_condition_true(struct expression *expr)
{
	sval_t tmp;

	if (!expr)
		return 0;

	if (__inline_fn && get_param_num(expr) >= 0) {
		if (get_implied_value(expr, &tmp) && tmp.value)
			return 1;
		return 0;
	}

	if (get_value(expr, &tmp) && tmp.value)
		return 1;

	return 0;
}

int known_condition_false(struct expression *expr)
{
	sval_t tmp;

	if (!expr)
		return 0;

	if (__inline_fn && get_param_num(expr) >= 0) {
		if (get_implied_value(expr, &tmp) && tmp.value == 0)
			return 1;
		return 0;
	}

	if (expr_is_zero(expr))
		return 1;

	return 0;
}

*expr
)
1959 if (known_condition_true(expr
))
1961 if (get_implied_value(expr
, &tmp
) && tmp
.value
)
1964 if (expr
->type
== EXPR_POSTOP
)
1965 return implied_condition_true(expr
->unop
);
1967 if (expr
->type
== EXPR_PREOP
&& expr
->op
== SPECIAL_DECREMENT
)
1968 return implied_not_equal(expr
->unop
, 1);
1969 if (expr
->type
== EXPR_PREOP
&& expr
->op
== SPECIAL_INCREMENT
)
1970 return implied_not_equal(expr
->unop
, -1);
1972 expr
= strip_expr(expr
);
1973 switch (expr
->type
) {
1975 if (do_comparison(expr
) == 1)
1979 if (expr
->op
== '!') {
1980 if (implied_condition_false(expr
->unop
))
1986 if (implied_not_equal(expr
, 0) == 1)
1993 int implied_condition_false(struct expression
*expr
)
1995 struct expression
*tmp
;
2001 if (known_condition_false(expr
))
2004 switch (expr
->type
) {
2006 if (do_comparison(expr
) == 2)
2009 if (expr
->op
== '!') {
2010 if (implied_condition_true(expr
->unop
))
2014 tmp
= strip_expr(expr
);
2016 return implied_condition_false(tmp
);
2019 if (get_implied_value(expr
, &sval
) && sval
.value
== 0)