2 * Copyright (C) 2010 Dan Carpenter.
4 * This program is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU General Public License
6 * as published by the Free Software Foundation; either version 2
7 * of the License, or (at your option) any later version.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
23 #include "smatch_slist.h"
24 #include "smatch_extra.h"
26 static bool get_rl_sval(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*sval_res
);
27 static bool get_rl_internal(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
);
28 static bool handle_variable(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
);
29 static struct range_list
*(*custom_handle_variable
)(struct expression
*expr
);
31 static bool get_implied_value_internal(struct expression
*expr
, int *recurse_cnt
, sval_t
*res_sval
);
32 static int get_absolute_rl_internal(struct expression
*expr
, struct range_list
**rl
, int *recurse_cnt
);
34 static sval_t zero
= {.type
= &int_ctype
, {.value
= 0} };
35 static sval_t one
= {.type
= &int_ctype
, {.value
= 1} };
37 static int fast_math_only
;
39 struct range_list
*rl_zero(void)
41 static struct range_list
*zero_perm
;
44 zero_perm
= clone_rl_permanent(alloc_rl(zero
, zero
));
48 struct range_list
*rl_one(void)
50 static struct range_list
*one_perm
;
53 one_perm
= clone_rl_permanent(alloc_rl(one
, one
));
67 static bool last_stmt_rl(struct statement
*stmt
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
69 struct expression
*expr
;
74 stmt
= last_ptr_list((struct ptr_list
*)stmt
->stmts
);
75 if (stmt
->type
== STMT_LABEL
) {
76 if (stmt
->label_statement
&&
77 stmt
->label_statement
->type
== STMT_EXPRESSION
)
78 expr
= stmt
->label_statement
->expression
;
81 } else if (stmt
->type
== STMT_EXPRESSION
) {
82 expr
= stmt
->expression
;
86 return get_rl_sval(expr
, implied
, recurse_cnt
, res
, res_sval
);
89 static bool handle_expression_statement_rl(struct expression
*expr
, int implied
,
90 int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
92 return last_stmt_rl(get_expression_statement(expr
), implied
, recurse_cnt
, res
, res_sval
);
95 static bool handle_address(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
97 struct range_list
*rl
;
103 if (implied
== RL_EXACT
)
106 if (custom_handle_variable
) {
107 rl
= custom_handle_variable(expr
);
115 if (get_mtag_sval(expr
, &sval
)) {
121 if (get_address_rl(expr
, res
)) {
129 static bool handle_ampersand_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
131 return handle_address(expr
, implied
, recurse_cnt
, res
, res_sval
);
134 static bool handle_negate_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
136 if (known_condition_true(expr
->unop
)) {
140 if (known_condition_false(expr
->unop
)) {
145 if (implied
== RL_EXACT
)
148 if (implied_condition_true(expr
->unop
)) {
152 if (implied_condition_false(expr
->unop
)) {
157 *res
= alloc_rl(zero
, one
);
161 static bool handle_bitwise_negate(struct expression
*expr
, int implied
, int *recurse_cnt
, sval_t
*res_sval
)
163 struct range_list
*rl
;
166 if (!get_rl_sval(expr
->unop
, implied
, recurse_cnt
, &rl
, &sval
))
168 if (!sval
.type
&& !rl_to_sval(rl
, &sval
))
170 sval
= sval_preop(sval
, '~');
171 sval_cast(get_type(expr
->unop
), sval
);
176 static bool untrusted_type_min(struct expression
*expr
)
178 struct range_list
*rl
;
180 rl
= var_user_rl(expr
);
181 return rl
&& sval_is_min(rl_min(rl
));
184 static bool handle_minus_preop(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
186 struct range_list
*rl
;
187 struct range_list
*ret
= NULL
;
189 sval_t neg_one
= { .value
= -1 };
190 sval_t zero
= { .value
= 0 };
193 if (!get_rl_sval(expr
->unop
, implied
, recurse_cnt
, &rl
, &sval
))
196 *res_sval
= sval_preop(sval
, '-');
200 * One complication is that -INT_MIN is still INT_MIN because of integer
201 * overflows... But how many times do we set a time out to INT_MIN?
202 * So normally when we call abs() then it does return a positive value.
206 neg_one
.type
= zero
.type
= type
;
208 if (sval_is_negative(rl_min(rl
))) {
209 struct range_list
*neg
;
210 struct data_range
*drange
;
211 sval_t new_min
, new_max
;
213 neg
= alloc_rl(sval_type_min(type
), neg_one
);
214 neg
= rl_intersection(rl
, neg
);
216 if (sval_is_min(rl_min(neg
)) && !sval_is_min(rl_max(neg
)))
217 neg
= remove_range(neg
, sval_type_min(type
), sval_type_min(type
));
219 FOR_EACH_PTR(neg
, drange
) {
220 new_min
= drange
->max
;
221 new_min
.value
= -new_min
.value
;
222 new_max
= drange
->min
;
223 new_max
.value
= -new_max
.value
;
224 add_range(&ret
, new_min
, new_max
);
225 } END_FOR_EACH_PTR(drange
);
227 if (untrusted_type_min(expr
))
228 add_range(&ret
, sval_type_min(type
), sval_type_min(type
));
231 if (!sval_is_negative(rl_max(rl
))) {
232 struct range_list
*pos
;
233 struct data_range
*drange
;
234 sval_t new_min
, new_max
;
236 pos
= alloc_rl(zero
, sval_type_max(type
));
237 pos
= rl_intersection(rl
, pos
);
239 FOR_EACH_PTR(pos
, drange
) {
240 new_min
= drange
->max
;
241 new_min
.value
= -new_min
.value
;
242 new_max
= drange
->min
;
243 new_max
.value
= -new_max
.value
;
244 add_range(&ret
, new_min
, new_max
);
245 } END_FOR_EACH_PTR(drange
);
252 static bool handle_preop_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
256 return handle_ampersand_rl(expr
, implied
, recurse_cnt
, res
, res_sval
);
258 return handle_negate_rl(expr
, implied
, recurse_cnt
, res
, res_sval
);
260 return handle_bitwise_negate(expr
, implied
, recurse_cnt
, res_sval
);
262 return handle_minus_preop(expr
, implied
, recurse_cnt
, res
, res_sval
);
264 return handle_variable(expr
, implied
, recurse_cnt
, res
, res_sval
);
266 return handle_expression_statement_rl(expr
, implied
, recurse_cnt
, res
, res_sval
);
272 static bool handle_divide_rl(struct range_list
*left_rl
, struct range_list
*right_rl
, int implied
, int *recurse_cnt
, struct range_list
**res
)
274 if (!left_rl
|| !right_rl
)
277 if (implied
!= RL_REAL_ABSOLUTE
) {
278 if (is_whole_rl(left_rl
) || is_whole_rl(right_rl
))
282 *res
= rl_binop(left_rl
, '/', right_rl
);
286 static int handle_offset_subtraction(struct expression
*expr
)
288 struct expression
*left
, *right
;
289 struct symbol
*left_sym
, *right_sym
;
291 int left_offset
, right_offset
;
293 type
= get_type(expr
);
294 if (!type
|| type
->type
!= SYM_PTR
)
296 type
= get_real_base_type(type
);
297 if (!type
|| (type_bits(type
) != 8 && (type
!= &void_ctype
)))
300 left
= strip_expr(expr
->left
);
301 right
= strip_expr(expr
->right
);
303 if (left
->type
!= EXPR_PREOP
|| left
->op
!= '&')
305 left
= strip_expr(left
->unop
);
307 left_sym
= expr_to_sym(left
);
308 right_sym
= expr_to_sym(right
);
309 if (!left_sym
|| left_sym
!= right_sym
)
312 left_offset
= get_member_offset_from_deref(left
);
313 if (right
->type
== EXPR_SYMBOL
)
316 if (right
->type
!= EXPR_PREOP
|| right
->op
!= '&')
318 right
= strip_expr(right
->unop
);
319 right_offset
= get_member_offset_from_deref(right
);
321 if (left_offset
< 0 || right_offset
< 0)
324 return left_offset
- right_offset
;
327 static bool handle_container_of(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
329 struct expression
*left
, *right
;
330 struct range_list
*left_orig
= NULL
;
332 sval_t left_sval
, right_sval
;
335 * I'm not 100% if ABSOLUTE should be handled like this but I think if
336 * IMPLIED overrules ABSOLUTE so it's a moot point.
338 * What this function does is if we have:
339 * p = container_of(foo, struct my_struct, member);
340 * Then if the offset is non-zero we can assume that p is a valid
341 * pointer. Mathematically, that's not necessarily true, but in
342 * pratical terms if p isn't valid then we're already in deep trouble
343 * to the point where printing more warnings now won't help.
345 * There are places were the author knows that container_of() is a
346 * no-op so the code will do a NULL test on the result. (This is
347 * obviously horrible code). So to handle code like this if the offset
348 * is zero then the result can be NULL.
350 if (implied
!= RL_IMPLIED
&&
351 implied
!= RL_ABSOLUTE
&&
352 implied
!= RL_REAL_ABSOLUTE
)
355 type
= get_type(expr
);
356 if (!type
|| type
->type
!= SYM_PTR
)
358 type
= get_real_base_type(type
);
359 if (!type
|| (type_bits(type
) != 8 && (type
!= &void_ctype
)))
362 left
= strip_expr(expr
->left
);
363 right
= strip_expr(expr
->right
);
365 if (right
->type
!= EXPR_OFFSETOF
)
368 if (!get_value(right
, &right_sval
))
370 /* Handle offset == 0 in the caller if possible. */
371 if (right_sval
.value
== 0)
374 get_rl_internal(left
, implied
, recurse_cnt
, &left_orig
);
376 * I think known binops are already handled at this point so this
377 * should be impossible. But handle it in the caller either way.
379 if (rl_to_sval(left_orig
, &left_sval
))
382 // TODO: it might be safer to say that known possible NULL or error
383 // error pointers return false.
385 *res
= clone_rl(valid_ptr_rl
);
390 static bool max_is_unknown_max(struct range_list
*rl
)
393 * The issue with this code is that we had:
394 * if (foo > 1) return 1 - foo;
395 * Ideally we would say that returns s32min-(-1) but what Smatch
396 * was saying was that the lowest possible value was "1 - INT_MAX"
398 * My solution is to ignore max values for int or larger. I keep
399 * the max for shorts etc, because those might be worthwhile.
401 * The problem with just returning 1 - INT_MAX is that that is
402 * treated as useful information but s32min is treated as basically
406 if (type_bits(rl_type(rl
)) < 31)
408 return sval_is_max(rl_max(rl
));
411 static bool handle_add_rl(struct expression
*expr
,
412 struct range_list
*left_rl
, struct range_list
*right_rl
,
413 int implied
, int *recurse_cnt
, struct range_list
**res
)
415 struct range_list
*valid
;
419 type
= get_type(expr
);
424 if (type_is_ptr(type
) && !var_user_rl(expr
->right
)) {
425 valid
= rl_intersection(left_rl
, valid_ptr_rl
);
426 if (valid
&& rl_equiv(valid
, left_rl
))
433 if (sval_binop_overflows(rl_min(left_rl
), expr
->op
, rl_min(right_rl
)) ||
434 sval_binop_overflows(rl_max(left_rl
), expr
->op
, rl_max(right_rl
))) {
435 min
= sval_type_min(type
);
436 max
= sval_type_max(type
);
438 min
= sval_binop(rl_min(left_rl
), expr
->op
, rl_min(right_rl
));
439 max
= sval_binop(rl_max(left_rl
), expr
->op
, rl_max(right_rl
));
442 *res
= alloc_rl(min
, max
);
446 static bool handle_subtract_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
449 struct range_list
*left_orig
, *right_orig
;
450 struct range_list
*left_rl
, *right_rl
;
451 sval_t min
, max
, tmp
;
455 type
= get_type(expr
);
457 offset
= handle_offset_subtraction(expr
);
462 *res
= alloc_rl(tmp
, tmp
);
466 if (handle_container_of(expr
, implied
, recurse_cnt
, res
))
469 comparison
= get_comparison(expr
->left
, expr
->right
);
472 get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_orig
);
473 left_rl
= cast_rl(type
, left_orig
);
475 get_rl_internal(expr
->right
, implied
, recurse_cnt
, &right_orig
);
476 right_rl
= cast_rl(type
, right_orig
);
478 if ((!left_rl
|| !right_rl
) &&
479 (implied
== RL_EXACT
|| implied
== RL_HARD
|| implied
== RL_FUZZY
))
483 left_rl
= alloc_whole_rl(type
);
485 right_rl
= alloc_whole_rl(type
);
487 /* negative values complicate everything fix this later */
488 if (sval_is_negative(rl_min(right_rl
)))
490 max
= rl_max(left_rl
);
491 min
= sval_type_min(type
);
493 switch (comparison
) {
495 case SPECIAL_UNSIGNED_GT
:
496 min
= sval_type_val(type
, 1);
497 max
= rl_max(left_rl
);
500 case SPECIAL_UNSIGNED_GTE
:
501 min
= sval_type_val(type
, 0);
502 max
= rl_max(left_rl
);
505 min
= sval_type_val(type
, 0);
506 max
= sval_type_val(type
, 0);
509 case SPECIAL_UNSIGNED_LT
:
510 max
= sval_type_val(type
, -1);
513 case SPECIAL_UNSIGNED_LTE
:
514 max
= sval_type_val(type
, 0);
517 if (!left_orig
|| !right_orig
)
519 *res
= rl_binop(left_rl
, '-', right_rl
);
523 if (!max_is_unknown_max(right_rl
) &&
524 !sval_binop_overflows(rl_min(left_rl
), '-', rl_max(right_rl
))) {
525 tmp
= sval_binop(rl_min(left_rl
), '-', rl_max(right_rl
));
526 if (sval_cmp(tmp
, min
) > 0)
530 if (!sval_is_max(rl_max(left_rl
))) {
531 tmp
= sval_binop(rl_max(left_rl
), '-', rl_min(right_rl
));
532 if (sval_cmp(tmp
, max
) < 0)
536 if (sval_is_min(min
) && sval_is_max(max
))
539 *res
= cast_rl(type
, alloc_rl(min
, max
));
543 static bool handle_mod_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
545 struct range_list
*rl
;
546 sval_t left
, right
, sval
;
548 if (implied
== RL_EXACT
) {
549 if (!get_implied_value(expr
->right
, &right
))
551 if (!get_implied_value(expr
->left
, &left
))
553 sval
= sval_binop(left
, '%', right
);
554 *res
= alloc_rl(sval
, sval
);
557 /* if we can't figure out the right side it's probably hopeless */
558 if (!get_implied_value_internal(expr
->right
, recurse_cnt
, &right
))
561 right
= sval_cast(get_type(expr
), right
);
564 if (get_rl_internal(expr
->left
, implied
, recurse_cnt
, &rl
) && rl
&&
565 rl_max(rl
).uvalue
< right
.uvalue
)
566 right
.uvalue
= rl_max(rl
).uvalue
;
568 *res
= alloc_rl(sval_cast(right
.type
, zero
), right
);
572 static bool handle_bitwise_AND(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
575 struct range_list
*left_rl
, *right_rl
;
578 if (implied
!= RL_IMPLIED
&& implied
!= RL_ABSOLUTE
&& implied
!= RL_REAL_ABSOLUTE
)
581 type
= get_type(expr
);
583 if (!get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
))
584 left_rl
= alloc_whole_rl(type
);
585 left_rl
= cast_rl(type
, left_rl
);
587 new_recurse
= *recurse_cnt
;
588 if (*recurse_cnt
>= 200)
589 new_recurse
= 100; /* Let's try super hard to get the mask */
590 if (!get_rl_internal(expr
->right
, implied
, &new_recurse
, &right_rl
))
591 right_rl
= alloc_whole_rl(type
);
592 right_rl
= cast_rl(type
, right_rl
);
593 *recurse_cnt
= new_recurse
;
595 *res
= rl_binop(left_rl
, '&', right_rl
);
599 static bool use_rl_binop(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
602 struct range_list
*left_rl
, *right_rl
;
604 if (implied
!= RL_IMPLIED
&& implied
!= RL_ABSOLUTE
&& implied
!= RL_REAL_ABSOLUTE
)
607 type
= get_type(expr
);
609 get_absolute_rl_internal(expr
->left
, &left_rl
, recurse_cnt
);
610 get_absolute_rl_internal(expr
->right
, &right_rl
, recurse_cnt
);
611 left_rl
= cast_rl(type
, left_rl
);
612 right_rl
= cast_rl(type
, right_rl
);
613 if (!left_rl
|| !right_rl
)
616 *res
= rl_binop(left_rl
, expr
->op
, right_rl
);
620 static bool handle_right_shift(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
622 struct range_list
*left_rl
, *right_rl
;
625 if (implied
== RL_EXACT
|| implied
== RL_HARD
)
628 if (get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
)) {
629 max
= rl_max(left_rl
);
630 min
= rl_min(left_rl
);
632 if (implied
== RL_FUZZY
)
634 max
= sval_type_max(get_type(expr
->left
));
635 min
= sval_type_val(get_type(expr
->left
), 0);
638 if (get_rl_internal(expr
->right
, implied
, recurse_cnt
, &right_rl
) &&
639 !sval_is_negative(rl_min(right_rl
))) {
640 min
= sval_binop(min
, SPECIAL_RIGHTSHIFT
, rl_max(right_rl
));
641 max
= sval_binop(max
, SPECIAL_RIGHTSHIFT
, rl_min(right_rl
));
642 } else if (!sval_is_negative(min
)) {
644 max
= sval_type_max(max
.type
);
649 *res
= alloc_rl(min
, max
);
653 static bool handle_left_shift(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
655 struct range_list
*left_rl
, *rl
;
658 if (implied
== RL_EXACT
|| implied
== RL_HARD
)
660 /* this is hopeless without the right side */
661 if (!get_implied_value_internal(expr
->right
, recurse_cnt
, &right
))
663 if (!get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
)) {
664 if (implied
== RL_FUZZY
)
666 left_rl
= alloc_whole_rl(get_type(expr
->left
));
669 rl
= rl_binop(left_rl
, SPECIAL_LEFTSHIFT
, alloc_rl(right
, right
));
676 static bool handle_known_binop(struct expression
*expr
, sval_t
*res
)
680 if (!get_value(expr
->left
, &left
))
682 if (!get_value(expr
->right
, &right
))
684 *res
= sval_binop(left
, expr
->op
, right
);
688 static int has_actual_ranges(struct range_list
*rl
)
690 struct data_range
*tmp
;
692 FOR_EACH_PTR(rl
, tmp
) {
693 if (sval_cmp(tmp
->min
, tmp
->max
) != 0)
695 } END_FOR_EACH_PTR(tmp
);
699 static struct range_list
*handle_implied_binop(struct range_list
*left_rl
, int op
, struct range_list
*right_rl
)
701 struct range_list
*res_rl
;
702 struct data_range
*left_drange
, *right_drange
;
705 if (!left_rl
|| !right_rl
)
707 if (has_actual_ranges(left_rl
))
709 if (has_actual_ranges(right_rl
))
712 if (ptr_list_size((struct ptr_list
*)left_rl
) * ptr_list_size((struct ptr_list
*)right_rl
) > 20)
717 FOR_EACH_PTR(left_rl
, left_drange
) {
718 FOR_EACH_PTR(right_rl
, right_drange
) {
719 if ((op
== '%' || op
== '/') &&
720 right_drange
->min
.value
== 0)
722 res
= sval_binop(left_drange
->min
, op
, right_drange
->min
);
723 add_range(&res_rl
, res
, res
);
724 } END_FOR_EACH_PTR(right_drange
);
725 } END_FOR_EACH_PTR(left_drange
);
730 static bool handle_binop_rl_helper(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
733 struct range_list
*left_rl
= NULL
;
734 struct range_list
*right_rl
= NULL
;
735 struct range_list
*rl
;
738 type
= get_promoted_type(get_type(expr
->left
), get_type(expr
->right
));
739 get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
);
740 left_rl
= cast_rl(type
, left_rl
);
741 get_rl_internal(expr
->right
, implied
, recurse_cnt
, &right_rl
);
742 right_rl
= cast_rl(type
, right_rl
);
744 rl
= handle_implied_binop(left_rl
, expr
->op
, right_rl
);
752 return handle_mod_rl(expr
, implied
, recurse_cnt
, res
);
754 return handle_bitwise_AND(expr
, implied
, recurse_cnt
, res
);
757 return use_rl_binop(expr
, implied
, recurse_cnt
, res
);
758 case SPECIAL_RIGHTSHIFT
:
759 return handle_right_shift(expr
, implied
, recurse_cnt
, res
);
760 case SPECIAL_LEFTSHIFT
:
761 return handle_left_shift(expr
, implied
, recurse_cnt
, res
);
763 return handle_add_rl(expr
, left_rl
, right_rl
, implied
, recurse_cnt
, res
);
765 return handle_subtract_rl(expr
, implied
, recurse_cnt
, res
);
767 return handle_divide_rl(left_rl
, right_rl
, implied
, recurse_cnt
, res
);
770 if (!left_rl
|| !right_rl
)
773 if (sval_binop_overflows(rl_min(left_rl
), expr
->op
, rl_min(right_rl
)) ||
774 sval_binop_overflows(rl_max(left_rl
), expr
->op
, rl_max(right_rl
))) {
775 min
= sval_type_min(type
);
776 max
= sval_type_max(type
);
778 min
= sval_binop(rl_min(left_rl
), expr
->op
, rl_min(right_rl
));
779 max
= sval_binop(rl_max(left_rl
), expr
->op
, rl_max(right_rl
));
782 *res
= alloc_rl(min
, max
);
787 static bool handle_binop_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
789 struct smatch_state
*state
;
790 struct range_list
*rl
;
794 *res_sval
= *expr
->sval
;
798 if (handle_known_binop(expr
, &val
)) {
799 expr
->sval
= malloc(sizeof(sval_t
));
804 if (implied
== RL_EXACT
)
807 if (custom_handle_variable
) {
808 rl
= custom_handle_variable(expr
);
815 state
= get_extra_state(expr
);
816 if (state
&& !is_whole_rl(estate_rl(state
))) {
817 if (implied
!= RL_HARD
|| estate_has_hard_max(state
)) {
818 *res
= clone_rl(estate_rl(state
));
823 return handle_binop_rl_helper(expr
, implied
, recurse_cnt
, res
, res_sval
);
826 static int do_comparison(struct expression
*expr
)
828 struct range_list
*left_ranges
= NULL
;
829 struct range_list
*right_ranges
= NULL
;
830 int poss_true
, poss_false
;
833 type
= get_type(expr
);
834 get_absolute_rl(expr
->left
, &left_ranges
);
835 get_absolute_rl(expr
->right
, &right_ranges
);
837 left_ranges
= cast_rl(type
, left_ranges
);
838 right_ranges
= cast_rl(type
, right_ranges
);
840 poss_true
= possibly_true_rl(left_ranges
, expr
->op
, right_ranges
);
841 poss_false
= possibly_false_rl(left_ranges
, expr
->op
, right_ranges
);
843 if (!poss_true
&& !poss_false
)
845 if (poss_true
&& !poss_false
)
847 if (!poss_true
&& poss_false
)
852 static bool handle_comparison_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
857 if (expr
->op
== SPECIAL_EQUAL
&& expr
->left
->type
== EXPR_TYPE
) {
858 struct symbol
*left
, *right
;
860 if (expr
->right
->type
!= EXPR_TYPE
)
863 left
= get_real_base_type(expr
->left
->symbol
);
864 right
= get_real_base_type(expr
->right
->symbol
);
866 while (type_is_ptr(left
) || type_is_ptr(right
)) {
868 if ((type_is_ptr(left
) && !type_is_ptr(right
)) ||
869 (!type_is_ptr(left
) && type_is_ptr(right
))) {
874 left
= get_real_base_type(left
);
875 right
= get_real_base_type(right
);
878 if (type_bits(left
) == type_bits(right
) &&
879 type_positive_bits(left
) == type_positive_bits(right
))
886 if (get_value(expr
->left
, &left
) && get_value(expr
->right
, &right
)) {
887 struct data_range tmp_left
, tmp_right
;
891 tmp_right
.min
= right
;
892 tmp_right
.max
= right
;
893 if (true_comparison_range(&tmp_left
, expr
->op
, &tmp_right
))
900 if (implied
== RL_EXACT
)
903 cmp
= do_comparison(expr
);
913 *res
= alloc_rl(zero
, one
);
917 static bool handle_logical_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
923 if (implied
== RL_EXACT
) {
924 if (get_value(expr
->left
, &left
))
926 if (get_value(expr
->right
, &right
))
929 if (get_implied_value_internal(expr
->left
, recurse_cnt
, &left
))
931 if (get_implied_value_internal(expr
->right
, recurse_cnt
, &right
))
936 case SPECIAL_LOGICAL_OR
:
937 if (left_known
&& left
.value
)
939 if (right_known
&& right
.value
)
941 if (left_known
&& right_known
)
944 case SPECIAL_LOGICAL_AND
:
945 if (left_known
&& left
.value
== 0)
947 if (right_known
&& right
.value
== 0)
949 if (left_known
&& right_known
)
956 if (implied
== RL_EXACT
)
959 *res
= alloc_rl(zero
, one
);
970 static bool handle_conditional_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
972 struct expression
*cond_true
;
973 struct range_list
*true_rl
, *false_rl
;
976 cond_true
= expr
->cond_true
;
978 cond_true
= expr
->conditional
;
980 if (known_condition_true(expr
->conditional
))
981 return get_rl_sval(cond_true
, implied
, recurse_cnt
, res
, res_sval
);
982 if (known_condition_false(expr
->conditional
))
983 return get_rl_sval(expr
->cond_false
, implied
, recurse_cnt
, res
, res_sval
);
985 if (implied
== RL_EXACT
)
988 if (implied_condition_true(expr
->conditional
))
989 return get_rl_sval(cond_true
, implied
, recurse_cnt
, res
, res_sval
);
990 if (implied_condition_false(expr
->conditional
))
991 return get_rl_sval(expr
->cond_false
, implied
, recurse_cnt
, res
, res_sval
);
993 /* this becomes a problem with deeply nested conditional statements */
994 if (fast_math_only
|| low_on_memory())
997 type
= get_type(expr
);
1000 __split_whole_condition(expr
->conditional
);
1002 get_rl_internal(cond_true
, implied
, recurse_cnt
, &true_rl
);
1003 __push_true_states();
1004 __use_false_states();
1006 get_rl_internal(expr
->cond_false
, implied
, recurse_cnt
, &false_rl
);
1007 __merge_true_states();
1010 if (!true_rl
|| !false_rl
)
1012 true_rl
= cast_rl(type
, true_rl
);
1013 false_rl
= cast_rl(type
, false_rl
);
1015 *res
= rl_union(true_rl
, false_rl
);
1019 static bool get_fuzzy_max_helper(struct expression
*expr
, sval_t
*max
)
1021 struct smatch_state
*state
;
1024 if (get_hard_max(expr
, &sval
)) {
1029 state
= get_extra_state(expr
);
1030 if (!state
|| !estate_has_fuzzy_max(state
))
1032 *max
= sval_cast(get_type(expr
), estate_get_fuzzy_max(state
));
1036 static bool get_fuzzy_min_helper(struct expression
*expr
, sval_t
*min
)
1038 struct smatch_state
*state
;
1041 state
= get_extra_state(expr
);
1042 if (!state
|| !estate_rl(state
))
1045 sval
= estate_min(state
);
1046 if (sval_is_negative(sval
) && sval_is_min(sval
))
1049 if (sval_is_max(sval
))
1052 *min
= sval_cast(get_type(expr
), sval
);
1056 int get_const_value(struct expression
*expr
, sval_t
*sval
)
1061 if (expr
->type
!= EXPR_SYMBOL
|| !expr
->symbol
)
1064 if (!(sym
->ctype
.modifiers
& MOD_CONST
))
1066 if (get_value(sym
->initializer
, &right
)) {
1067 *sval
= sval_cast(get_type(expr
), right
);
1073 struct range_list
*var_to_absolute_rl(struct expression
*expr
)
1075 struct smatch_state
*state
;
1076 struct range_list
*rl
;
1078 state
= get_extra_state(expr
);
1079 if (!state
|| is_whole_rl(estate_rl(state
))) {
1080 state
= get_real_absolute_state(expr
);
1081 if (state
&& state
->data
&& !estate_is_whole(state
))
1082 return clone_rl(estate_rl(state
));
1083 if (get_mtag_rl(expr
, &rl
))
1085 if (get_db_type_rl(expr
, &rl
) && !is_whole_rl(rl
))
1087 return alloc_whole_rl(get_type(expr
));
1089 return clone_rl(estate_rl(state
));
1092 static bool is_param_sym(struct expression
*expr
)
1094 if (expr
->type
!= EXPR_SYMBOL
)
1096 if (get_param_num(expr
) < 0)
1101 static bool handle_variable(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1103 struct smatch_state
*state
;
1104 struct range_list
*rl
;
1105 sval_t sval
, min
, max
;
1106 struct symbol
*type
;
1108 if (get_const_value(expr
, &sval
)) {
1113 if (implied
== RL_EXACT
)
1116 if (custom_handle_variable
) {
1117 rl
= custom_handle_variable(expr
);
1119 if (!rl_to_sval(rl
, res_sval
))
1122 *res
= var_to_absolute_rl(expr
);
1127 if (get_mtag_sval(expr
, &sval
)) {
1132 type
= get_type(expr
);
1134 ((type
->type
== SYM_ARRAY
&& !is_param_sym(expr
)) ||
1135 type
->type
== SYM_FN
))
1136 return handle_address(expr
, implied
, recurse_cnt
, res
, res_sval
);
1138 /* FIXME: call rl_to_sval() on the results */
1144 state
= get_extra_state(expr
);
1146 if (implied
== RL_HARD
)
1148 if (get_mtag_rl(expr
, res
))
1150 if (is_array(expr
) && get_array_rl(expr
, res
))
1152 if (implied
== RL_IMPLIED
)
1154 if (get_db_type_rl(expr
, res
))
1158 if (implied
== RL_HARD
&& !estate_has_hard_max(state
))
1160 *res
= clone_rl(estate_rl(state
));
1162 case RL_REAL_ABSOLUTE
: {
1163 struct smatch_state
*abs_state
;
1165 state
= get_extra_state(expr
);
1166 abs_state
= get_real_absolute_state(expr
);
1168 if (estate_rl(state
) && estate_rl(abs_state
)) {
1169 *res
= clone_rl(rl_intersection(estate_rl(state
),
1170 estate_rl(abs_state
)));
1172 } else if (estate_rl(state
)) {
1173 *res
= clone_rl(estate_rl(state
));
1175 } else if (estate_is_empty(state
)) {
1177 * FIXME: we don't handle empty extra states correctly.
1179 * The real abs rl is supposed to be filtered by the
1180 * extra state if there is one. We don't bother keeping
1181 * the abs state in sync all the time because we know it
1182 * will be filtered later.
1184 * It's not totally obvious to me how they should be
1185 * handled. Perhaps we should take the whole rl and
1186 * filter by the imaginary states. Perhaps we should
1187 * just go with the empty state.
1189 * Anyway what we currently do is return NULL here and
1190 * that gets translated into the whole range in
1191 * get_real_absolute_rl().
1195 } else if (estate_rl(abs_state
)) {
1196 *res
= clone_rl(estate_rl(abs_state
));
1200 if (get_mtag_rl(expr
, res
))
1202 if (get_db_type_rl(expr
, res
))
1204 if (is_array(expr
) && get_array_rl(expr
, res
))
1209 if (!get_fuzzy_min_helper(expr
, &min
))
1210 min
= sval_type_min(get_type(expr
));
1211 if (!get_fuzzy_max_helper(expr
, &max
))
1213 /* fuzzy ranges are often inverted */
1214 if (sval_cmp(min
, max
) > 0) {
1219 *res
= alloc_rl(min
, max
);
1225 static sval_t
handle_sizeof(struct expression
*expr
)
1230 ret
= sval_blank(expr
);
1231 sym
= expr
->cast_type
;
1233 sym
= evaluate_expression(expr
->cast_expression
);
1235 __silence_warnings_for_stmt
= true;
1240 * Expressions of restricted types will possibly get
1241 * promoted - check that here. I'm not sure how this works,
1242 * the problem is that sizeof(le16) shouldn't be promoted and
1243 * the original code did that... Let's if zero this out and
1247 if (is_restricted_type(sym
)) {
1248 if (type_bits(sym
) < bits_in_int
)
1252 if (is_fouled_type(sym
))
1255 examine_symbol_type(sym
);
1257 ret
.type
= size_t_ctype
;
1258 if (type_bits(sym
) <= 0) /* sizeof(void) */ {
1259 if (get_real_base_type(sym
) == &void_ctype
)
1264 ret
.value
= type_bytes(sym
);
1269 static bool handle_strlen(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1271 struct expression
*arg
, *tmp
;
1273 sval_t ret
= { .type
= &ulong_ctype
};
1274 struct range_list
*rl
;
1276 arg
= get_argument_from_call_expr(expr
->args
, 0);
1279 if (arg
->type
== EXPR_STRING
) {
1280 ret
.value
= arg
->string
->length
- 1;
1284 if (implied
== RL_EXACT
)
1286 if (get_implied_value(arg
, &tag
) &&
1287 (tmp
= fake_string_from_mtag(tag
.uvalue
))) {
1288 ret
.value
= tmp
->string
->length
- 1;
1293 if (implied
== RL_HARD
|| implied
== RL_FUZZY
)
1296 if (get_implied_return(expr
, &rl
)) {
1304 static bool handle_builtin_constant_p(struct expression
*expr
, int implied
, int *recurse_cnt
, sval_t
*res_sval
)
1306 struct expression
*arg
, *assigned
;
1307 struct range_list
*rl
;
1310 arg
= get_argument_from_call_expr(expr
->args
, 0);
1312 * Originally, Smatch used to pretend there were no constants but then
1313 * it turned out that we need to know at build time if some paths are
1314 * impossible or not to avoid crazy false positives.
1316 * But then someone added a BUILD_BUG_ON(!__builtin_constant_p(_mask)).
1317 * So now we try to figure out if GCC can determine the value at
1320 if (get_rl_internal(arg
, RL_EXACT
, recurse_cnt
, &rl
)) {
1330 assigned
= get_assigned_expr(arg
);
1332 if (assigned
&& get_rl_internal(assigned
, RL_EXACT
, recurse_cnt
, &rl
))
1341 static bool handle__builtin_choose_expr(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1343 struct expression
*const_expr
, *expr1
, *expr2
;
1346 const_expr
= get_argument_from_call_expr(expr
->args
, 0);
1347 expr1
= get_argument_from_call_expr(expr
->args
, 1);
1348 expr2
= get_argument_from_call_expr(expr
->args
, 2);
1350 if (!get_value(const_expr
, &sval
) || !expr1
|| !expr2
)
1353 return get_rl_sval(expr1
, implied
, recurse_cnt
, res
, res_sval
);
1355 return get_rl_sval(expr2
, implied
, recurse_cnt
, res
, res_sval
);
/*
 * smatch_fls() - "find last set": return the bit index (0-63) of the
 * most significant set bit in @value, or -1 when @value is zero.
 */
int smatch_fls(unsigned long long value)
{
	int i;

	for (i = 63; i >= 0; i--) {
		if (value & (1ULL << i))
			return i;
	}
	return -1;
}
1369 static bool handle_ffs(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1371 struct expression
*arg
;
1372 struct bit_info
*bits
;
1373 sval_t high
= { .type
= &int_ctype
};
1374 sval_t low
= { .type
= &int_ctype
};
1376 arg
= get_argument_from_call_expr(expr
->args
, 0);
1378 bits
= get_bit_info(arg
);
1379 if (bits
->possible
== 0) {
1385 high
.value
= ffsll(bits
->set
);
1387 high
.value
= smatch_fls(bits
->possible
);
1389 low
.value
= ffsll(bits
->possible
);
1391 *res
= alloc_rl(low
, high
);
1395 static bool handle_call_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1397 struct range_list
*rl
;
1399 if (sym_name_is("__builtin_constant_p", expr
->fn
))
1400 return handle_builtin_constant_p(expr
, implied
, recurse_cnt
, res_sval
);
1402 if (sym_name_is("__builtin_choose_expr", expr
->fn
))
1403 return handle__builtin_choose_expr(expr
, implied
, recurse_cnt
, res
, res_sval
);
1405 if (sym_name_is("__builtin_expect", expr
->fn
) ||
1406 sym_name_is("__builtin_bswap16", expr
->fn
) ||
1407 sym_name_is("__builtin_bswap32", expr
->fn
) ||
1408 sym_name_is("__builtin_bswap64", expr
->fn
)) {
1409 struct expression
*arg
;
1411 arg
= get_argument_from_call_expr(expr
->args
, 0);
1412 return get_rl_sval(arg
, implied
, recurse_cnt
, res
, res_sval
);
1415 if (sym_name_is("__builtin_ffs", expr
->fn
) ||
1416 sym_name_is("__builtin_ffsl", expr
->fn
) ||
1417 sym_name_is("__builtin_ffsll", expr
->fn
) ||
1418 sym_name_is("__ffs", expr
->fn
))
1419 return handle_ffs(expr
, implied
, recurse_cnt
, res
, res_sval
);
1421 if (sym_name_is("strlen", expr
->fn
))
1422 return handle_strlen(expr
, implied
, recurse_cnt
, res
, res_sval
);
1424 if (implied
== RL_EXACT
|| implied
== RL_HARD
)
1427 if (custom_handle_variable
) {
1428 rl
= custom_handle_variable(expr
);
1435 /* Ugh... get_implied_return() sets *rl to NULL on failure */
1436 if (get_implied_return(expr
, &rl
)) {
1440 rl
= db_return_vals(expr
);
1448 static bool handle_cast(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1450 struct range_list
*rl
;
1451 struct symbol
*type
;
1454 type
= get_type(expr
);
1455 if (get_rl_sval(expr
->cast_expression
, implied
, recurse_cnt
, &rl
, &sval
)) {
1457 *res_sval
= sval_cast(type
, sval
);
1459 *res
= cast_rl(type
, rl
);
1462 if (implied
== RL_ABSOLUTE
|| implied
== RL_REAL_ABSOLUTE
) {
1463 *res
= alloc_whole_rl(type
);
1466 if (implied
== RL_IMPLIED
&& type
&&
1467 type_bits(type
) > 0 && type_bits(type
) < 32) {
1468 *res
= alloc_whole_rl(type
);
1474 static bool handle_offsetof_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1476 struct expression
*down
= expr
->down
;
1477 struct range_list
*offset_rl
= NULL
, *down_rl
= NULL
;
1478 sval_t sval
= { .type
= ssize_t_ctype
};
1479 struct symbol
*type
;
1481 type
= get_real_base_type(expr
->in
);
1485 if (expr
->op
== '.') {
1486 struct symbol
*field
;
1489 field
= find_identifier(expr
->ident
, type
->symbol_list
, &offset
);
1493 sval
.value
= offset
;
1494 offset_rl
= alloc_rl(sval
, sval
);
1499 offset_rl
= alloc_rl(sval
, sval
);
1501 struct range_list
*idx_rl
= NULL
, *bytes_rl
;
1503 if (get_rl_internal(expr
->index
, implied
, recurse_cnt
, &idx_rl
))
1506 sval
.value
= type_bytes(type
);
1507 if (sval
.value
<= 0)
1509 bytes_rl
= alloc_rl(sval
, sval
);
1511 offset_rl
= rl_binop(idx_rl
, '*', bytes_rl
);
1516 if (down
->type
== EXPR_OFFSETOF
&& !down
->in
)
1518 if (!get_rl_internal(down
, implied
, recurse_cnt
, &down_rl
))
1521 *res
= rl_binop(offset_rl
, '+', down_rl
);
1529 static bool get_rl_sval(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*sval_res
)
1531 struct range_list
*rl
= (void *)-1UL;
1532 struct symbol
*type
;
1535 type
= get_type(expr
);
1536 expr
= strip_parens(expr
);
1540 if (++(*recurse_cnt
) >= 200)
1543 switch(expr
->type
) {
1545 case EXPR_FORCE_CAST
:
1546 case EXPR_IMPLIED_CAST
:
1547 handle_cast(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1551 expr
= strip_expr(expr
);
1555 switch (expr
->type
) {
1557 sval
= sval_from_val(expr
, expr
->value
);
1560 sval
= sval_from_fval(expr
, expr
->fvalue
);
1563 handle_preop_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1566 get_rl_sval(expr
->unop
, implied
, recurse_cnt
, &rl
, &sval
);
1569 handle_binop_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1572 handle_comparison_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1575 handle_logical_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1577 case EXPR_PTRSIZEOF
:
1579 sval
= handle_sizeof(expr
);
1582 case EXPR_CONDITIONAL
:
1583 handle_conditional_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1586 handle_call_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1589 if (get_mtag_sval(expr
, &sval
))
1591 if (implied
== RL_EXACT
)
1593 rl
= alloc_rl(valid_ptr_min_sval
, valid_ptr_max_sval
);
1596 handle_offsetof_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1599 evaluate_expression(expr
);
1600 if (expr
->type
== EXPR_VALUE
)
1601 sval
= sval_from_val(expr
, expr
->value
);
1604 handle_variable(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1608 if (rl
== (void *)-1UL)
1611 if (sval
.type
|| (rl
&& rl_to_sval(rl
, &sval
))) {
1615 if (implied
== RL_EXACT
)
1622 if (type
&& (implied
== RL_ABSOLUTE
|| implied
== RL_REAL_ABSOLUTE
) &&
1623 !custom_handle_variable
) {
1624 *res
= alloc_whole_rl(type
);
1630 static bool get_rl_internal(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
1632 struct range_list
*rl
= NULL
;
1635 if (!get_rl_sval(expr
, implied
, recurse_cnt
, &rl
, &sval
))
1639 *res
= alloc_rl(sval
, sval
);
1645 static bool get_rl_helper(struct expression
*expr
, int implied
, struct range_list
**res
)
1647 struct range_list
*rl
= NULL
;
1649 int recurse_cnt
= 0;
1651 if (get_value(expr
, &sval
)) {
1652 if (implied
== RL_HARD
) {
1653 if (sval
.uvalue
== INT_MAX
||
1654 sval
.uvalue
== UINT_MAX
||
1655 sval
.uvalue
== LONG_MAX
||
1656 sval
.uvalue
== ULONG_MAX
)
1659 *res
= alloc_rl(sval
, sval
);
1663 if (!get_rl_sval(expr
, implied
, &recurse_cnt
, &rl
, &sval
))
1667 *res
= alloc_rl(sval
, sval
);
1674 struct expression
*expr
;
1676 } cached_results
[24];
1677 static int cache_idx
;
1679 void clear_math_cache(void)
1681 memset(cached_results
, 0, sizeof(cached_results
));
1684 void set_fast_math_only(void)
1689 void clear_fast_math_only(void)
1695 * Don't cache EXPR_VALUE because values are fast already.
1698 static bool get_value_literal(struct expression
*expr
, sval_t
*res_sval
)
1700 struct expression
*tmp
;
1701 int recurse_cnt
= 0;
1703 tmp
= strip_expr(expr
);
1704 if (!tmp
|| tmp
->type
!= EXPR_VALUE
)
1707 return get_rl_sval(expr
, RL_EXACT
, &recurse_cnt
, NULL
, res_sval
);
1710 /* returns 1 if it can get a value literal or else returns 0 */
1711 int get_value(struct expression
*expr
, sval_t
*res_sval
)
1713 struct range_list
*(*orig_custom_fn
)(struct expression
*expr
);
1714 int recurse_cnt
= 0;
1718 if (get_value_literal(expr
, res_sval
))
1722 * This only handles RL_EXACT because other expr statements can be
1723 * different at different points. Like the list iterator, for example.
1725 for (i
= 0; i
< ARRAY_SIZE(cached_results
); i
++) {
1726 if (expr
== cached_results
[i
].expr
) {
1727 if (cached_results
[i
].sval
.type
) {
1728 *res_sval
= cached_results
[i
].sval
;
1735 orig_custom_fn
= custom_handle_variable
;
1736 custom_handle_variable
= NULL
;
1737 get_rl_sval(expr
, RL_EXACT
, &recurse_cnt
, NULL
, &sval
);
1739 custom_handle_variable
= orig_custom_fn
;
1741 cached_results
[cache_idx
].expr
= expr
;
1742 cached_results
[cache_idx
].sval
= sval
;
1743 cache_idx
= (cache_idx
+ 1) % ARRAY_SIZE(cached_results
);
1752 static bool get_implied_value_internal(struct expression
*expr
, int *recurse_cnt
, sval_t
*res_sval
)
1754 struct range_list
*rl
;
1756 res_sval
->type
= NULL
;
1758 if (!get_rl_sval(expr
, RL_IMPLIED
, recurse_cnt
, &rl
, res_sval
))
1760 if (!res_sval
->type
&& !rl_to_sval(rl
, res_sval
))
1765 int get_implied_value(struct expression
*expr
, sval_t
*sval
)
1767 struct range_list
*rl
;
1769 if (!get_rl_helper(expr
, RL_IMPLIED
, &rl
) ||
1770 !rl_to_sval(rl
, sval
))
1775 int get_implied_value_fast(struct expression
*expr
, sval_t
*sval
)
1777 struct range_list
*rl
;
1785 set_fast_math_only();
1786 if (get_rl_helper(expr
, RL_IMPLIED
, &rl
) &&
1787 rl_to_sval(rl
, sval
))
1789 clear_fast_math_only();
1795 int get_implied_min(struct expression
*expr
, sval_t
*sval
)
1797 struct range_list
*rl
;
1799 if (!get_rl_helper(expr
, RL_IMPLIED
, &rl
) || !rl
)
1805 int get_implied_max(struct expression
*expr
, sval_t
*sval
)
1807 struct range_list
*rl
;
1809 if (!get_rl_helper(expr
, RL_IMPLIED
, &rl
) || !rl
)
1815 int get_implied_rl(struct expression
*expr
, struct range_list
**rl
)
1817 if (!get_rl_helper(expr
, RL_IMPLIED
, rl
) || !*rl
)
1822 static int get_absolute_rl_internal(struct expression
*expr
, struct range_list
**rl
, int *recurse_cnt
)
1825 get_rl_internal(expr
, RL_ABSOLUTE
, recurse_cnt
, rl
);
1827 *rl
= alloc_whole_rl(get_type(expr
));
1831 int get_absolute_rl(struct expression
*expr
, struct range_list
**rl
)
1834 get_rl_helper(expr
, RL_ABSOLUTE
, rl
);
1836 *rl
= alloc_whole_rl(get_type(expr
));
1840 int get_real_absolute_rl(struct expression
*expr
, struct range_list
**rl
)
1843 get_rl_helper(expr
, RL_REAL_ABSOLUTE
, rl
);
1845 *rl
= alloc_whole_rl(get_type(expr
));
1849 int custom_get_absolute_rl(struct expression
*expr
,
1850 struct range_list
*(*fn
)(struct expression
*expr
),
1851 struct range_list
**rl
)
1853 struct range_list
*(*orig_fn
)(struct expression
*expr
);
1857 orig_fn
= custom_handle_variable
;
1858 custom_handle_variable
= fn
;
1859 ret
= get_rl_helper(expr
, RL_REAL_ABSOLUTE
, rl
);
1860 custom_handle_variable
= orig_fn
;
1864 int get_implied_rl_var_sym(const char *var
, struct symbol
*sym
, struct range_list
**rl
)
1866 struct smatch_state
*state
;
1868 state
= get_state(SMATCH_EXTRA
, var
, sym
);
1869 *rl
= estate_rl(state
);
1875 int get_hard_max(struct expression
*expr
, sval_t
*sval
)
1877 struct range_list
*rl
;
1879 if (!get_rl_helper(expr
, RL_HARD
, &rl
) || !rl
)
1885 int get_fuzzy_min(struct expression
*expr
, sval_t
*sval
)
1887 struct range_list
*rl
;
1890 if (!get_rl_helper(expr
, RL_FUZZY
, &rl
) || !rl
)
1893 if (sval_is_negative(tmp
) && sval_is_min(tmp
))
1899 int get_fuzzy_max(struct expression
*expr
, sval_t
*sval
)
1901 struct range_list
*rl
;
1904 if (!get_rl_helper(expr
, RL_FUZZY
, &rl
) || !rl
)
1907 if (max
.uvalue
> INT_MAX
- 10000)
1913 int get_absolute_min(struct expression
*expr
, sval_t
*sval
)
1915 struct range_list
*rl
;
1916 struct symbol
*type
;
1918 type
= get_type(expr
);
1920 type
= &llong_ctype
; // FIXME: this is wrong but places assume get type can't fail.
1922 get_rl_helper(expr
, RL_REAL_ABSOLUTE
, &rl
);
1926 *sval
= sval_type_min(type
);
1928 if (sval_cmp(*sval
, sval_type_min(type
)) < 0)
1929 *sval
= sval_type_min(type
);
1933 int get_absolute_max(struct expression
*expr
, sval_t
*sval
)
1935 struct range_list
*rl
;
1936 struct symbol
*type
;
1938 type
= get_type(expr
);
1940 type
= &llong_ctype
;
1942 get_rl_helper(expr
, RL_REAL_ABSOLUTE
, &rl
);
1946 *sval
= sval_type_max(type
);
1948 if (sval_cmp(sval_type_max(type
), *sval
) < 0)
1949 *sval
= sval_type_max(type
);
1953 int known_condition_true(struct expression
*expr
)
1960 if (__inline_fn
&& get_param_num(expr
) >= 0) {
1961 if (get_implied_value(expr
, &tmp
) && tmp
.value
)
1966 if (get_value(expr
, &tmp
) && tmp
.value
)
1972 int known_condition_false(struct expression
*expr
)
1979 if (__inline_fn
&& get_param_num(expr
) >= 0) {
1980 if (get_implied_value(expr
, &tmp
) && tmp
.value
== 0)
1985 if (expr_is_zero(expr
))
1991 int implied_condition_true(struct expression
*expr
)
1998 if (known_condition_true(expr
))
2000 if (get_implied_value(expr
, &tmp
) && tmp
.value
)
2003 if (expr
->type
== EXPR_POSTOP
)
2004 return implied_condition_true(expr
->unop
);
2006 if (expr
->type
== EXPR_PREOP
&& expr
->op
== SPECIAL_DECREMENT
)
2007 return implied_not_equal(expr
->unop
, 1);
2008 if (expr
->type
== EXPR_PREOP
&& expr
->op
== SPECIAL_INCREMENT
)
2009 return implied_not_equal(expr
->unop
, -1);
2011 expr
= strip_expr(expr
);
2012 switch (expr
->type
) {
2014 if (do_comparison(expr
) == 1)
2018 if (expr
->op
== '!') {
2019 if (implied_condition_false(expr
->unop
))
2025 if (implied_not_equal(expr
, 0) == 1)
2032 int implied_condition_false(struct expression
*expr
)
2034 struct expression
*tmp
;
2040 if (known_condition_false(expr
))
2043 switch (expr
->type
) {
2045 if (do_comparison(expr
) == 2)
2048 if (expr
->op
== '!') {
2049 if (implied_condition_true(expr
->unop
))
2053 tmp
= strip_expr(expr
);
2055 return implied_condition_false(tmp
);
2058 if (get_implied_value(expr
, &sval
) && sval
.value
== 0)