2 * Copyright (C) 2010 Dan Carpenter.
4 * This program is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU General Public License
6 * as published by the Free Software Foundation; either version 2
7 * of the License, or (at your option) any later version.
9 * This program is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
12 * GNU General Public License for more details.
14 * You should have received a copy of the GNU General Public License
15 * along with this program; if not, see http://www.gnu.org/copyleft/gpl.txt
23 #include "smatch_slist.h"
24 #include "smatch_extra.h"
26 static bool get_rl_sval(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*sval_res
);
27 static bool get_rl_internal(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
);
28 static bool handle_variable(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
);
29 static struct range_list
*(*custom_handle_variable
)(struct expression
*expr
);
31 static bool get_implied_value_internal(struct expression
*expr
, int *recurse_cnt
, sval_t
*res_sval
);
32 static int get_absolute_rl_internal(struct expression
*expr
, struct range_list
**rl
, int *recurse_cnt
);
34 static sval_t zero
= {.type
= &int_ctype
, {.value
= 0} };
35 static sval_t one
= {.type
= &int_ctype
, {.value
= 1} };
37 static int fast_math_only
;
39 struct range_list
*rl_zero(void)
41 static struct range_list
*zero_perm
;
44 zero_perm
= clone_rl_permanent(alloc_rl(zero
, zero
));
48 struct range_list
*rl_one(void)
50 static struct range_list
*one_perm
;
53 one_perm
= clone_rl_permanent(alloc_rl(one
, one
));
67 static bool last_stmt_rl(struct statement
*stmt
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
69 struct expression
*expr
;
74 stmt
= last_ptr_list((struct ptr_list
*)stmt
->stmts
);
75 if (stmt
->type
== STMT_LABEL
) {
76 if (stmt
->label_statement
&&
77 stmt
->label_statement
->type
== STMT_EXPRESSION
)
78 expr
= stmt
->label_statement
->expression
;
81 } else if (stmt
->type
== STMT_EXPRESSION
) {
82 expr
= stmt
->expression
;
86 return get_rl_sval(expr
, implied
, recurse_cnt
, res
, res_sval
);
89 static bool handle_expression_statement_rl(struct expression
*expr
, int implied
,
90 int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
92 return last_stmt_rl(get_expression_statement(expr
), implied
, recurse_cnt
, res
, res_sval
);
95 static bool handle_address(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
97 struct range_list
*rl
;
103 if (implied
== RL_EXACT
)
106 if (custom_handle_variable
) {
107 rl
= custom_handle_variable(expr
);
115 if (get_mtag_sval(expr
, &sval
)) {
121 if (get_address_rl(expr
, res
)) {
129 static bool handle_ampersand_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
131 return handle_address(expr
, implied
, recurse_cnt
, res
, res_sval
);
134 static bool handle_negate_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
136 if (known_condition_true(expr
->unop
)) {
140 if (known_condition_false(expr
->unop
)) {
145 if (implied
== RL_EXACT
)
148 if (implied_condition_true(expr
->unop
)) {
152 if (implied_condition_false(expr
->unop
)) {
157 *res
= alloc_rl(zero
, one
);
161 static bool handle_bitwise_negate(struct expression
*expr
, int implied
, int *recurse_cnt
, sval_t
*res_sval
)
163 struct range_list
*rl
;
166 if (!get_rl_sval(expr
->unop
, implied
, recurse_cnt
, &rl
, &sval
))
168 if (!sval
.type
&& !rl_to_sval(rl
, &sval
))
170 sval
= sval_preop(sval
, '~');
171 sval_cast(get_type(expr
->unop
), sval
);
176 static bool untrusted_type_min(struct expression
*expr
)
178 struct range_list
*rl
;
180 rl
= var_user_rl(expr
);
181 return rl
&& sval_is_min(rl_min(rl
));
184 static bool handle_minus_preop(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
186 struct range_list
*rl
;
187 struct range_list
*ret
= NULL
;
189 sval_t neg_one
= { .value
= -1 };
190 sval_t zero
= { .value
= 0 };
193 if (!get_rl_sval(expr
->unop
, implied
, recurse_cnt
, &rl
, &sval
))
196 *res_sval
= sval_preop(sval
, '-');
200 * One complication is that -INT_MIN is still INT_MIN because of integer
201 * overflows... But how many times do we set a time out to INT_MIN?
202 * So normally when we call abs() then it does return a positive value.
206 neg_one
.type
= zero
.type
= type
;
208 if (sval_is_negative(rl_min(rl
))) {
209 struct range_list
*neg
;
210 struct data_range
*drange
;
211 sval_t new_min
, new_max
;
213 neg
= alloc_rl(sval_type_min(type
), neg_one
);
214 neg
= rl_intersection(rl
, neg
);
216 if (sval_is_min(rl_min(neg
)) && !sval_is_min(rl_max(neg
)))
217 neg
= remove_range(neg
, sval_type_min(type
), sval_type_min(type
));
219 FOR_EACH_PTR(neg
, drange
) {
220 new_min
= drange
->max
;
221 new_min
.value
= -new_min
.value
;
222 new_max
= drange
->min
;
223 new_max
.value
= -new_max
.value
;
224 add_range(&ret
, new_min
, new_max
);
225 } END_FOR_EACH_PTR(drange
);
227 if (untrusted_type_min(expr
))
228 add_range(&ret
, sval_type_min(type
), sval_type_min(type
));
231 if (!sval_is_negative(rl_max(rl
))) {
232 struct range_list
*pos
;
233 struct data_range
*drange
;
234 sval_t new_min
, new_max
;
236 pos
= alloc_rl(zero
, sval_type_max(type
));
237 pos
= rl_intersection(rl
, pos
);
239 FOR_EACH_PTR(pos
, drange
) {
240 new_min
= drange
->max
;
241 new_min
.value
= -new_min
.value
;
242 new_max
= drange
->min
;
243 new_max
.value
= -new_max
.value
;
244 add_range(&ret
, new_min
, new_max
);
245 } END_FOR_EACH_PTR(drange
);
252 static bool handle_preop_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
256 return handle_ampersand_rl(expr
, implied
, recurse_cnt
, res
, res_sval
);
258 return handle_negate_rl(expr
, implied
, recurse_cnt
, res
, res_sval
);
260 return handle_bitwise_negate(expr
, implied
, recurse_cnt
, res_sval
);
262 return handle_minus_preop(expr
, implied
, recurse_cnt
, res
, res_sval
);
264 return handle_variable(expr
, implied
, recurse_cnt
, res
, res_sval
);
266 return handle_expression_statement_rl(expr
, implied
, recurse_cnt
, res
, res_sval
);
272 static bool handle_divide_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
274 struct range_list
*left_rl
= NULL
;
275 struct range_list
*right_rl
= NULL
;
278 type
= get_type(expr
);
280 get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
);
281 left_rl
= cast_rl(type
, left_rl
);
282 get_rl_internal(expr
->right
, implied
, recurse_cnt
, &right_rl
);
283 right_rl
= cast_rl(type
, right_rl
);
285 if (!left_rl
|| !right_rl
)
288 if (implied
!= RL_REAL_ABSOLUTE
) {
289 if (is_whole_rl(left_rl
) || is_whole_rl(right_rl
))
293 *res
= rl_binop(left_rl
, '/', right_rl
);
297 static int handle_offset_subtraction(struct expression
*expr
)
299 struct expression
*left
, *right
;
300 struct symbol
*left_sym
, *right_sym
;
302 int left_offset
, right_offset
;
304 type
= get_type(expr
);
305 if (!type
|| type
->type
!= SYM_PTR
)
307 type
= get_real_base_type(type
);
308 if (!type
|| (type_bits(type
) != 8 && (type
!= &void_ctype
)))
311 left
= strip_expr(expr
->left
);
312 right
= strip_expr(expr
->right
);
314 if (left
->type
!= EXPR_PREOP
|| left
->op
!= '&')
316 left
= strip_expr(left
->unop
);
318 left_sym
= expr_to_sym(left
);
319 right_sym
= expr_to_sym(right
);
320 if (!left_sym
|| left_sym
!= right_sym
)
323 left_offset
= get_member_offset_from_deref(left
);
324 if (right
->type
== EXPR_SYMBOL
)
327 if (right
->type
!= EXPR_PREOP
|| right
->op
!= '&')
329 right
= strip_expr(right
->unop
);
330 right_offset
= get_member_offset_from_deref(right
);
332 if (left_offset
< 0 || right_offset
< 0)
335 return left_offset
- right_offset
;
338 static bool handle_container_of(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
340 struct expression
*left
, *right
;
341 struct range_list
*left_orig
= NULL
;
343 sval_t left_sval
, right_sval
;
346 * I'm not 100% if ABSOLUTE should be handled like this but I think if
347 * IMPLIED overrules ABSOLUTE so it's a moot point.
349 * What this function does is if we have:
350 * p = container_of(foo, struct my_struct, member);
351 * Then if the offset is non-zero we can assume that p is a valid
352 * pointer. Mathematically, that's not necessarily true, but in
353 * pratical terms if p isn't valid then we're already in deep trouble
354 * to the point where printing more warnings now won't help.
356 * There are places were the author knows that container_of() is a
357 * no-op so the code will do a NULL test on the result. (This is
358 * obviously horrible code). So to handle code like this if the offset
359 * is zero then the result can be NULL.
361 if (implied
!= RL_IMPLIED
&&
362 implied
!= RL_ABSOLUTE
&&
363 implied
!= RL_REAL_ABSOLUTE
)
366 type
= get_type(expr
);
367 if (!type
|| type
->type
!= SYM_PTR
)
369 type
= get_real_base_type(type
);
370 if (!type
|| (type_bits(type
) != 8 && (type
!= &void_ctype
)))
373 left
= strip_expr(expr
->left
);
374 right
= strip_expr(expr
->right
);
376 if (right
->type
!= EXPR_OFFSETOF
)
379 if (!get_value(right
, &right_sval
))
381 /* Handle offset == 0 in the caller if possible. */
382 if (right_sval
.value
== 0)
385 get_rl_internal(left
, implied
, recurse_cnt
, &left_orig
);
387 * I think known binops are already handled at this point so this
388 * should be impossible. But handle it in the caller either way.
390 if (rl_to_sval(left_orig
, &left_sval
))
393 // TODO: it might be safer to say that known possible NULL or error
394 // error pointers return false.
396 *res
= clone_rl(valid_ptr_rl
);
401 static bool max_is_unknown_max(struct range_list
*rl
)
404 * The issue with this code is that we had:
405 * if (foo > 1) return 1 - foo;
406 * Ideally we would say that returns s32min-(-1) but what Smatch
407 * was saying was that the lowest possible value was "1 - INT_MAX"
409 * My solution is to ignore max values for int or larger. I keep
410 * the max for shorts etc, because those might be worthwhile.
412 * The problem with just returning 1 - INT_MAX is that that is
413 * treated as useful information but s32min is treated as basically
417 if (type_bits(rl_type(rl
)) < 31)
419 return sval_is_max(rl_max(rl
));
422 static bool handle_add_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
424 struct range_list
*left_rl
= NULL
;
425 struct range_list
*right_rl
= NULL
;
426 struct range_list
*valid
;
430 type
= get_type(expr
);
432 get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
);
433 left_rl
= cast_rl(type
, left_rl
);
434 get_rl_internal(expr
->right
, implied
, recurse_cnt
, &right_rl
);
435 right_rl
= cast_rl(type
, right_rl
);
440 if (type_is_ptr(type
) && !var_user_rl(expr
->right
)) {
441 valid
= rl_intersection(left_rl
, valid_ptr_rl
);
442 if (valid
&& rl_equiv(valid
, left_rl
))
449 if (sval_binop_overflows(rl_min(left_rl
), expr
->op
, rl_min(right_rl
)))
451 if (sval_binop_overflows(rl_max(left_rl
), expr
->op
, rl_max(right_rl
)))
454 min
= sval_binop(rl_min(left_rl
), expr
->op
, rl_min(right_rl
));
455 max
= sval_binop(rl_max(left_rl
), expr
->op
, rl_max(right_rl
));
457 *res
= alloc_rl(min
, max
);
461 static bool handle_subtract_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
464 struct range_list
*left_orig
, *right_orig
;
465 struct range_list
*left_rl
, *right_rl
;
466 sval_t min
, max
, tmp
;
470 type
= get_type(expr
);
472 offset
= handle_offset_subtraction(expr
);
477 *res
= alloc_rl(tmp
, tmp
);
481 if (handle_container_of(expr
, implied
, recurse_cnt
, res
))
484 comparison
= get_comparison(expr
->left
, expr
->right
);
487 get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_orig
);
488 left_rl
= cast_rl(type
, left_orig
);
490 get_rl_internal(expr
->right
, implied
, recurse_cnt
, &right_orig
);
491 right_rl
= cast_rl(type
, right_orig
);
493 if ((!left_rl
|| !right_rl
) &&
494 (implied
== RL_EXACT
|| implied
== RL_HARD
|| implied
== RL_FUZZY
))
498 left_rl
= alloc_whole_rl(type
);
500 right_rl
= alloc_whole_rl(type
);
502 /* negative values complicate everything fix this later */
503 if (sval_is_negative(rl_min(right_rl
)))
505 max
= rl_max(left_rl
);
506 min
= sval_type_min(type
);
508 switch (comparison
) {
510 case SPECIAL_UNSIGNED_GT
:
511 min
= sval_type_val(type
, 1);
512 max
= rl_max(left_rl
);
515 case SPECIAL_UNSIGNED_GTE
:
516 min
= sval_type_val(type
, 0);
517 max
= rl_max(left_rl
);
520 min
= sval_type_val(type
, 0);
521 max
= sval_type_val(type
, 0);
524 case SPECIAL_UNSIGNED_LT
:
525 max
= sval_type_val(type
, -1);
528 case SPECIAL_UNSIGNED_LTE
:
529 max
= sval_type_val(type
, 0);
532 if (!left_orig
|| !right_orig
)
534 *res
= rl_binop(left_rl
, '-', right_rl
);
538 if (!max_is_unknown_max(right_rl
) &&
539 !sval_binop_overflows(rl_min(left_rl
), '-', rl_max(right_rl
))) {
540 tmp
= sval_binop(rl_min(left_rl
), '-', rl_max(right_rl
));
541 if (sval_cmp(tmp
, min
) > 0)
545 if (!sval_is_max(rl_max(left_rl
))) {
546 tmp
= sval_binop(rl_max(left_rl
), '-', rl_min(right_rl
));
547 if (sval_cmp(tmp
, max
) < 0)
551 if (sval_is_min(min
) && sval_is_max(max
))
554 *res
= cast_rl(type
, alloc_rl(min
, max
));
558 static bool handle_mod_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
560 struct range_list
*rl
;
561 sval_t left
, right
, sval
;
563 if (implied
== RL_EXACT
) {
564 if (!get_implied_value(expr
->right
, &right
))
566 if (!get_implied_value(expr
->left
, &left
))
568 sval
= sval_binop(left
, '%', right
);
569 *res
= alloc_rl(sval
, sval
);
572 /* if we can't figure out the right side it's probably hopeless */
573 if (!get_implied_value_internal(expr
->right
, recurse_cnt
, &right
))
576 right
= sval_cast(get_type(expr
), right
);
579 if (get_rl_internal(expr
->left
, implied
, recurse_cnt
, &rl
) && rl
&&
580 rl_max(rl
).uvalue
< right
.uvalue
)
581 right
.uvalue
= rl_max(rl
).uvalue
;
583 *res
= alloc_rl(sval_cast(right
.type
, zero
), right
);
587 static bool handle_bitwise_AND(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
590 struct range_list
*left_rl
, *right_rl
;
593 if (implied
!= RL_IMPLIED
&& implied
!= RL_ABSOLUTE
&& implied
!= RL_REAL_ABSOLUTE
)
596 type
= get_type(expr
);
598 if (!get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
))
599 left_rl
= alloc_whole_rl(type
);
600 left_rl
= cast_rl(type
, left_rl
);
602 new_recurse
= *recurse_cnt
;
603 if (*recurse_cnt
>= 200)
604 new_recurse
= 100; /* Let's try super hard to get the mask */
605 if (!get_rl_internal(expr
->right
, implied
, &new_recurse
, &right_rl
))
606 right_rl
= alloc_whole_rl(type
);
607 right_rl
= cast_rl(type
, right_rl
);
608 *recurse_cnt
= new_recurse
;
610 *res
= rl_binop(left_rl
, '&', right_rl
);
614 static bool use_rl_binop(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
617 struct range_list
*left_rl
, *right_rl
;
619 if (implied
!= RL_IMPLIED
&& implied
!= RL_ABSOLUTE
&& implied
!= RL_REAL_ABSOLUTE
)
622 type
= get_type(expr
);
624 get_absolute_rl_internal(expr
->left
, &left_rl
, recurse_cnt
);
625 get_absolute_rl_internal(expr
->right
, &right_rl
, recurse_cnt
);
626 left_rl
= cast_rl(type
, left_rl
);
627 right_rl
= cast_rl(type
, right_rl
);
628 if (!left_rl
|| !right_rl
)
631 *res
= rl_binop(left_rl
, expr
->op
, right_rl
);
635 static bool handle_right_shift(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
637 struct range_list
*left_rl
, *right_rl
;
640 if (implied
== RL_EXACT
|| implied
== RL_HARD
)
643 if (get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
)) {
644 max
= rl_max(left_rl
);
645 min
= rl_min(left_rl
);
647 if (implied
== RL_FUZZY
)
649 max
= sval_type_max(get_type(expr
->left
));
650 min
= sval_type_val(get_type(expr
->left
), 0);
653 if (get_rl_internal(expr
->right
, implied
, recurse_cnt
, &right_rl
) &&
654 !sval_is_negative(rl_min(right_rl
))) {
655 min
= sval_binop(min
, SPECIAL_RIGHTSHIFT
, rl_max(right_rl
));
656 max
= sval_binop(max
, SPECIAL_RIGHTSHIFT
, rl_min(right_rl
));
657 } else if (!sval_is_negative(min
)) {
659 max
= sval_type_max(max
.type
);
664 *res
= alloc_rl(min
, max
);
668 static bool handle_left_shift(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
670 struct range_list
*left_rl
, *rl
;
673 if (implied
== RL_EXACT
|| implied
== RL_HARD
)
675 /* this is hopeless without the right side */
676 if (!get_implied_value_internal(expr
->right
, recurse_cnt
, &right
))
678 if (!get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
)) {
679 if (implied
== RL_FUZZY
)
681 left_rl
= alloc_whole_rl(get_type(expr
->left
));
684 rl
= rl_binop(left_rl
, SPECIAL_LEFTSHIFT
, alloc_rl(right
, right
));
691 static bool handle_known_binop(struct expression
*expr
, sval_t
*res
)
695 if (!get_value(expr
->left
, &left
))
697 if (!get_value(expr
->right
, &right
))
699 *res
= sval_binop(left
, expr
->op
, right
);
703 static int has_actual_ranges(struct range_list
*rl
)
705 struct data_range
*tmp
;
707 FOR_EACH_PTR(rl
, tmp
) {
708 if (sval_cmp(tmp
->min
, tmp
->max
) != 0)
710 } END_FOR_EACH_PTR(tmp
);
714 static struct range_list
*handle_implied_binop(struct range_list
*left_rl
, int op
, struct range_list
*right_rl
)
716 struct range_list
*res_rl
;
717 struct data_range
*left_drange
, *right_drange
;
720 if (!left_rl
|| !right_rl
)
722 if (has_actual_ranges(left_rl
))
724 if (has_actual_ranges(right_rl
))
727 if (ptr_list_size((struct ptr_list
*)left_rl
) * ptr_list_size((struct ptr_list
*)right_rl
) > 20)
732 FOR_EACH_PTR(left_rl
, left_drange
) {
733 FOR_EACH_PTR(right_rl
, right_drange
) {
734 if ((op
== '%' || op
== '/') &&
735 right_drange
->min
.value
== 0)
737 res
= sval_binop(left_drange
->min
, op
, right_drange
->min
);
738 add_range(&res_rl
, res
, res
);
739 } END_FOR_EACH_PTR(right_drange
);
740 } END_FOR_EACH_PTR(left_drange
);
745 static bool handle_binop_rl_helper(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
748 struct range_list
*left_rl
= NULL
;
749 struct range_list
*right_rl
= NULL
;
750 struct range_list
*rl
;
753 type
= get_promoted_type(get_type(expr
->left
), get_type(expr
->right
));
754 get_rl_internal(expr
->left
, implied
, recurse_cnt
, &left_rl
);
755 left_rl
= cast_rl(type
, left_rl
);
756 get_rl_internal(expr
->right
, implied
, recurse_cnt
, &right_rl
);
757 right_rl
= cast_rl(type
, right_rl
);
759 rl
= handle_implied_binop(left_rl
, expr
->op
, right_rl
);
767 return handle_mod_rl(expr
, implied
, recurse_cnt
, res
);
769 return handle_bitwise_AND(expr
, implied
, recurse_cnt
, res
);
772 return use_rl_binop(expr
, implied
, recurse_cnt
, res
);
773 case SPECIAL_RIGHTSHIFT
:
774 return handle_right_shift(expr
, implied
, recurse_cnt
, res
);
775 case SPECIAL_LEFTSHIFT
:
776 return handle_left_shift(expr
, implied
, recurse_cnt
, res
);
778 return handle_add_rl(expr
, implied
, recurse_cnt
, res
);
780 return handle_subtract_rl(expr
, implied
, recurse_cnt
, res
);
782 return handle_divide_rl(expr
, implied
, recurse_cnt
, res
);
785 if (!left_rl
|| !right_rl
)
788 if (sval_binop_overflows(rl_min(left_rl
), expr
->op
, rl_min(right_rl
)))
790 if (sval_binop_overflows(rl_max(left_rl
), expr
->op
, rl_max(right_rl
)))
793 min
= sval_binop(rl_min(left_rl
), expr
->op
, rl_min(right_rl
));
794 max
= sval_binop(rl_max(left_rl
), expr
->op
, rl_max(right_rl
));
796 *res
= alloc_rl(min
, max
);
801 static bool handle_binop_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
803 struct smatch_state
*state
;
804 struct range_list
*rl
;
808 *res_sval
= *expr
->sval
;
812 if (handle_known_binop(expr
, &val
)) {
813 expr
->sval
= malloc(sizeof(sval_t
));
818 if (implied
== RL_EXACT
)
821 if (custom_handle_variable
) {
822 rl
= custom_handle_variable(expr
);
829 state
= get_extra_state(expr
);
830 if (state
&& !is_whole_rl(estate_rl(state
))) {
831 if (implied
!= RL_HARD
|| estate_has_hard_max(state
)) {
832 *res
= clone_rl(estate_rl(state
));
837 return handle_binop_rl_helper(expr
, implied
, recurse_cnt
, res
, res_sval
);
840 static int do_comparison(struct expression
*expr
)
842 struct range_list
*left_ranges
= NULL
;
843 struct range_list
*right_ranges
= NULL
;
844 int poss_true
, poss_false
;
847 type
= get_type(expr
);
848 get_absolute_rl(expr
->left
, &left_ranges
);
849 get_absolute_rl(expr
->right
, &right_ranges
);
851 left_ranges
= cast_rl(type
, left_ranges
);
852 right_ranges
= cast_rl(type
, right_ranges
);
854 poss_true
= possibly_true_rl(left_ranges
, expr
->op
, right_ranges
);
855 poss_false
= possibly_false_rl(left_ranges
, expr
->op
, right_ranges
);
857 if (!poss_true
&& !poss_false
)
859 if (poss_true
&& !poss_false
)
861 if (!poss_true
&& poss_false
)
866 static bool handle_comparison_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
871 if (expr
->op
== SPECIAL_EQUAL
&& expr
->left
->type
== EXPR_TYPE
) {
872 struct symbol
*left
, *right
;
874 if (expr
->right
->type
!= EXPR_TYPE
)
877 left
= get_real_base_type(expr
->left
->symbol
);
878 right
= get_real_base_type(expr
->right
->symbol
);
880 while (type_is_ptr(left
) || type_is_ptr(right
)) {
882 if ((type_is_ptr(left
) && !type_is_ptr(right
)) ||
883 (!type_is_ptr(left
) && type_is_ptr(right
))) {
888 left
= get_real_base_type(left
);
889 right
= get_real_base_type(right
);
892 if (type_bits(left
) == type_bits(right
) &&
893 type_positive_bits(left
) == type_positive_bits(right
))
900 if (get_value(expr
->left
, &left
) && get_value(expr
->right
, &right
)) {
901 struct data_range tmp_left
, tmp_right
;
905 tmp_right
.min
= right
;
906 tmp_right
.max
= right
;
907 if (true_comparison_range(&tmp_left
, expr
->op
, &tmp_right
))
914 if (implied
== RL_EXACT
)
917 cmp
= do_comparison(expr
);
927 *res
= alloc_rl(zero
, one
);
931 static bool handle_logical_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
937 if (implied
== RL_EXACT
) {
938 if (get_value(expr
->left
, &left
))
940 if (get_value(expr
->right
, &right
))
943 if (get_implied_value_internal(expr
->left
, recurse_cnt
, &left
))
945 if (get_implied_value_internal(expr
->right
, recurse_cnt
, &right
))
950 case SPECIAL_LOGICAL_OR
:
951 if (left_known
&& left
.value
)
953 if (right_known
&& right
.value
)
955 if (left_known
&& right_known
)
958 case SPECIAL_LOGICAL_AND
:
959 if (left_known
&& left
.value
== 0)
961 if (right_known
&& right
.value
== 0)
963 if (left_known
&& right_known
)
970 if (implied
== RL_EXACT
)
973 *res
= alloc_rl(zero
, one
);
984 static bool handle_conditional_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
986 struct expression
*cond_true
;
987 struct range_list
*true_rl
, *false_rl
;
990 cond_true
= expr
->cond_true
;
992 cond_true
= expr
->conditional
;
994 if (known_condition_true(expr
->conditional
))
995 return get_rl_sval(cond_true
, implied
, recurse_cnt
, res
, res_sval
);
996 if (known_condition_false(expr
->conditional
))
997 return get_rl_sval(expr
->cond_false
, implied
, recurse_cnt
, res
, res_sval
);
999 if (implied
== RL_EXACT
)
1002 if (implied_condition_true(expr
->conditional
))
1003 return get_rl_sval(cond_true
, implied
, recurse_cnt
, res
, res_sval
);
1004 if (implied_condition_false(expr
->conditional
))
1005 return get_rl_sval(expr
->cond_false
, implied
, recurse_cnt
, res
, res_sval
);
1007 /* this becomes a problem with deeply nested conditional statements */
1008 if (fast_math_only
|| low_on_memory())
1011 type
= get_type(expr
);
1014 __split_whole_condition(expr
->conditional
);
1016 get_rl_internal(cond_true
, implied
, recurse_cnt
, &true_rl
);
1017 __push_true_states();
1018 __use_false_states();
1020 get_rl_internal(expr
->cond_false
, implied
, recurse_cnt
, &false_rl
);
1021 __merge_true_states();
1024 if (!true_rl
|| !false_rl
)
1026 true_rl
= cast_rl(type
, true_rl
);
1027 false_rl
= cast_rl(type
, false_rl
);
1029 *res
= rl_union(true_rl
, false_rl
);
1033 static bool get_fuzzy_max_helper(struct expression
*expr
, sval_t
*max
)
1035 struct smatch_state
*state
;
1038 if (get_hard_max(expr
, &sval
)) {
1043 state
= get_extra_state(expr
);
1044 if (!state
|| !estate_has_fuzzy_max(state
))
1046 *max
= sval_cast(get_type(expr
), estate_get_fuzzy_max(state
));
1050 static bool get_fuzzy_min_helper(struct expression
*expr
, sval_t
*min
)
1052 struct smatch_state
*state
;
1055 state
= get_extra_state(expr
);
1056 if (!state
|| !estate_rl(state
))
1059 sval
= estate_min(state
);
1060 if (sval_is_negative(sval
) && sval_is_min(sval
))
1063 if (sval_is_max(sval
))
1066 *min
= sval_cast(get_type(expr
), sval
);
1070 int get_const_value(struct expression
*expr
, sval_t
*sval
)
1075 if (expr
->type
!= EXPR_SYMBOL
|| !expr
->symbol
)
1078 if (!(sym
->ctype
.modifiers
& MOD_CONST
))
1080 if (get_value(sym
->initializer
, &right
)) {
1081 *sval
= sval_cast(get_type(expr
), right
);
1087 struct range_list
*var_to_absolute_rl(struct expression
*expr
)
1089 struct smatch_state
*state
;
1090 struct range_list
*rl
;
1092 state
= get_extra_state(expr
);
1093 if (!state
|| is_whole_rl(estate_rl(state
))) {
1094 state
= get_real_absolute_state(expr
);
1095 if (state
&& state
->data
&& !estate_is_whole(state
))
1096 return clone_rl(estate_rl(state
));
1097 if (get_mtag_rl(expr
, &rl
))
1099 if (get_db_type_rl(expr
, &rl
) && !is_whole_rl(rl
))
1101 return alloc_whole_rl(get_type(expr
));
1103 return clone_rl(estate_rl(state
));
1106 static bool is_param_sym(struct expression
*expr
)
1108 if (expr
->type
!= EXPR_SYMBOL
)
1110 if (get_param_num(expr
) < 0)
1115 static bool handle_variable(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1117 struct smatch_state
*state
;
1118 struct range_list
*rl
;
1119 sval_t sval
, min
, max
;
1120 struct symbol
*type
;
1122 if (get_const_value(expr
, &sval
)) {
1127 if (implied
== RL_EXACT
)
1130 if (custom_handle_variable
) {
1131 rl
= custom_handle_variable(expr
);
1133 if (!rl_to_sval(rl
, res_sval
))
1136 *res
= var_to_absolute_rl(expr
);
1141 if (get_mtag_sval(expr
, &sval
)) {
1146 type
= get_type(expr
);
1148 ((type
->type
== SYM_ARRAY
&& !is_param_sym(expr
)) ||
1149 type
->type
== SYM_FN
))
1150 return handle_address(expr
, implied
, recurse_cnt
, res
, res_sval
);
1152 /* FIXME: call rl_to_sval() on the results */
1158 state
= get_extra_state(expr
);
1160 if (implied
== RL_HARD
)
1162 if (get_mtag_rl(expr
, res
))
1164 if (is_array(expr
) && get_array_rl(expr
, res
))
1166 if (implied
== RL_IMPLIED
)
1168 if (get_db_type_rl(expr
, res
))
1172 if (implied
== RL_HARD
&& !estate_has_hard_max(state
))
1174 *res
= clone_rl(estate_rl(state
));
1176 case RL_REAL_ABSOLUTE
: {
1177 struct smatch_state
*abs_state
;
1179 state
= get_extra_state(expr
);
1180 abs_state
= get_real_absolute_state(expr
);
1182 if (estate_rl(state
) && estate_rl(abs_state
)) {
1183 *res
= clone_rl(rl_intersection(estate_rl(state
),
1184 estate_rl(abs_state
)));
1186 } else if (estate_rl(state
)) {
1187 *res
= clone_rl(estate_rl(state
));
1189 } else if (estate_is_empty(state
)) {
1191 * FIXME: we don't handle empty extra states correctly.
1193 * The real abs rl is supposed to be filtered by the
1194 * extra state if there is one. We don't bother keeping
1195 * the abs state in sync all the time because we know it
1196 * will be filtered later.
1198 * It's not totally obvious to me how they should be
1199 * handled. Perhaps we should take the whole rl and
1200 * filter by the imaginary states. Perhaps we should
1201 * just go with the empty state.
1203 * Anyway what we currently do is return NULL here and
1204 * that gets translated into the whole range in
1205 * get_real_absolute_rl().
1209 } else if (estate_rl(abs_state
)) {
1210 *res
= clone_rl(estate_rl(abs_state
));
1214 if (get_mtag_rl(expr
, res
))
1216 if (get_db_type_rl(expr
, res
))
1218 if (is_array(expr
) && get_array_rl(expr
, res
))
1223 if (!get_fuzzy_min_helper(expr
, &min
))
1224 min
= sval_type_min(get_type(expr
));
1225 if (!get_fuzzy_max_helper(expr
, &max
))
1227 /* fuzzy ranges are often inverted */
1228 if (sval_cmp(min
, max
) > 0) {
1233 *res
= alloc_rl(min
, max
);
1239 static sval_t
handle_sizeof(struct expression
*expr
)
1244 ret
= sval_blank(expr
);
1245 sym
= expr
->cast_type
;
1247 sym
= evaluate_expression(expr
->cast_expression
);
1249 __silence_warnings_for_stmt
= true;
1254 * Expressions of restricted types will possibly get
1255 * promoted - check that here. I'm not sure how this works,
1256 * the problem is that sizeof(le16) shouldn't be promoted and
1257 * the original code did that... Let's if zero this out and
1261 if (is_restricted_type(sym
)) {
1262 if (type_bits(sym
) < bits_in_int
)
1266 if (is_fouled_type(sym
))
1269 examine_symbol_type(sym
);
1271 ret
.type
= size_t_ctype
;
1272 if (type_bits(sym
) <= 0) /* sizeof(void) */ {
1273 if (get_real_base_type(sym
) == &void_ctype
)
1278 ret
.value
= type_bytes(sym
);
1283 static bool handle_strlen(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1285 struct expression
*arg
, *tmp
;
1287 sval_t ret
= { .type
= &ulong_ctype
};
1288 struct range_list
*rl
;
1290 arg
= get_argument_from_call_expr(expr
->args
, 0);
1293 if (arg
->type
== EXPR_STRING
) {
1294 ret
.value
= arg
->string
->length
- 1;
1298 if (implied
== RL_EXACT
)
1300 if (get_implied_value(arg
, &tag
) &&
1301 (tmp
= fake_string_from_mtag(tag
.uvalue
))) {
1302 ret
.value
= tmp
->string
->length
- 1;
1307 if (implied
== RL_HARD
|| implied
== RL_FUZZY
)
1310 if (get_implied_return(expr
, &rl
)) {
1318 static bool handle_builtin_constant_p(struct expression
*expr
, int implied
, int *recurse_cnt
, sval_t
*res_sval
)
1320 struct expression
*arg
, *assigned
;
1321 struct range_list
*rl
;
1324 arg
= get_argument_from_call_expr(expr
->args
, 0);
1326 * Originally, Smatch used to pretend there were no constants but then
1327 * it turned out that we need to know at build time if some paths are
1328 * impossible or not to avoid crazy false positives.
1330 * But then someone added a BUILD_BUG_ON(!__builtin_constant_p(_mask)).
1331 * So now we try to figure out if GCC can determine the value at
1334 if (get_rl_internal(arg
, RL_EXACT
, recurse_cnt
, &rl
)) {
1344 assigned
= get_assigned_expr(arg
);
1346 if (assigned
&& get_rl_internal(assigned
, RL_EXACT
, recurse_cnt
, &rl
))
1355 static bool handle__builtin_choose_expr(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1357 struct expression
*const_expr
, *expr1
, *expr2
;
1360 const_expr
= get_argument_from_call_expr(expr
->args
, 0);
1361 expr1
= get_argument_from_call_expr(expr
->args
, 1);
1362 expr2
= get_argument_from_call_expr(expr
->args
, 2);
1364 if (!get_value(const_expr
, &sval
) || !expr1
|| !expr2
)
1367 return get_rl_sval(expr1
, implied
, recurse_cnt
, res
, res_sval
);
1369 return get_rl_sval(expr2
, implied
, recurse_cnt
, res
, res_sval
);
/*
 * Find the highest set bit of value, 0-indexed (63..0).
 * Returns -1 when value is zero.
 */
int smatch_fls(unsigned long long value)
{
	int i;

	for (i = 63; i >= 0; i--) {
		if (value & 1ULL << i)
			return i;
	}
	return -1;
}
1383 static bool handle_ffs(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1385 struct expression
*arg
;
1386 struct bit_info
*bits
;
1387 sval_t high
= { .type
= &int_ctype
};
1388 sval_t low
= { .type
= &int_ctype
};
1390 arg
= get_argument_from_call_expr(expr
->args
, 0);
1392 bits
= get_bit_info(arg
);
1393 if (bits
->possible
== 0) {
1399 high
.value
= ffsll(bits
->set
);
1401 high
.value
= smatch_fls(bits
->possible
);
1403 low
.value
= ffsll(bits
->possible
);
1405 *res
= alloc_rl(low
, high
);
1409 static bool handle_call_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1411 struct range_list
*rl
;
1413 if (sym_name_is("__builtin_constant_p", expr
->fn
))
1414 return handle_builtin_constant_p(expr
, implied
, recurse_cnt
, res_sval
);
1416 if (sym_name_is("__builtin_choose_expr", expr
->fn
))
1417 return handle__builtin_choose_expr(expr
, implied
, recurse_cnt
, res
, res_sval
);
1419 if (sym_name_is("__builtin_expect", expr
->fn
) ||
1420 sym_name_is("__builtin_bswap16", expr
->fn
) ||
1421 sym_name_is("__builtin_bswap32", expr
->fn
) ||
1422 sym_name_is("__builtin_bswap64", expr
->fn
)) {
1423 struct expression
*arg
;
1425 arg
= get_argument_from_call_expr(expr
->args
, 0);
1426 return get_rl_sval(arg
, implied
, recurse_cnt
, res
, res_sval
);
1429 if (sym_name_is("__builtin_ffs", expr
->fn
) ||
1430 sym_name_is("__builtin_ffsl", expr
->fn
) ||
1431 sym_name_is("__builtin_ffsll", expr
->fn
) ||
1432 sym_name_is("__ffs", expr
->fn
))
1433 return handle_ffs(expr
, implied
, recurse_cnt
, res
, res_sval
);
1435 if (sym_name_is("strlen", expr
->fn
))
1436 return handle_strlen(expr
, implied
, recurse_cnt
, res
, res_sval
);
1438 if (implied
== RL_EXACT
|| implied
== RL_HARD
)
1441 if (custom_handle_variable
) {
1442 rl
= custom_handle_variable(expr
);
1449 /* Ugh... get_implied_return() sets *rl to NULL on failure */
1450 if (get_implied_return(expr
, &rl
)) {
1454 rl
= db_return_vals(expr
);
1462 static bool handle_cast(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1464 struct range_list
*rl
;
1465 struct symbol
*type
;
1468 type
= get_type(expr
);
1469 if (get_rl_sval(expr
->cast_expression
, implied
, recurse_cnt
, &rl
, &sval
)) {
1471 *res_sval
= sval_cast(type
, sval
);
1473 *res
= cast_rl(type
, rl
);
1476 if (implied
== RL_ABSOLUTE
|| implied
== RL_REAL_ABSOLUTE
) {
1477 *res
= alloc_whole_rl(type
);
1480 if (implied
== RL_IMPLIED
&& type
&&
1481 type_bits(type
) > 0 && type_bits(type
) < 32) {
1482 *res
= alloc_whole_rl(type
);
1488 static bool handle_offsetof_rl(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*res_sval
)
1490 struct expression
*down
= expr
->down
;
1491 struct range_list
*offset_rl
= NULL
, *down_rl
= NULL
;
1492 sval_t sval
= { .type
= ssize_t_ctype
};
1493 struct symbol
*type
;
1495 type
= get_real_base_type(expr
->in
);
1499 if (expr
->op
== '.') {
1500 struct symbol
*field
;
1503 field
= find_identifier(expr
->ident
, type
->symbol_list
, &offset
);
1507 sval
.value
= offset
;
1508 offset_rl
= alloc_rl(sval
, sval
);
1513 offset_rl
= alloc_rl(sval
, sval
);
1515 struct range_list
*idx_rl
= NULL
, *bytes_rl
;
1517 if (get_rl_internal(expr
->index
, implied
, recurse_cnt
, &idx_rl
))
1520 sval
.value
= type_bytes(type
);
1521 if (sval
.value
<= 0)
1523 bytes_rl
= alloc_rl(sval
, sval
);
1525 offset_rl
= rl_binop(idx_rl
, '*', bytes_rl
);
1530 if (down
->type
== EXPR_OFFSETOF
&& !down
->in
)
1532 if (!get_rl_internal(down
, implied
, recurse_cnt
, &down_rl
))
1535 *res
= rl_binop(offset_rl
, '+', down_rl
);
1543 static bool get_rl_sval(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
, sval_t
*sval_res
)
1545 struct range_list
*rl
= (void *)-1UL;
1546 struct symbol
*type
;
1549 type
= get_type(expr
);
1550 expr
= strip_parens(expr
);
1554 if (++(*recurse_cnt
) >= 200)
1557 switch(expr
->type
) {
1559 case EXPR_FORCE_CAST
:
1560 case EXPR_IMPLIED_CAST
:
1561 handle_cast(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1565 expr
= strip_expr(expr
);
1569 switch (expr
->type
) {
1571 sval
= sval_from_val(expr
, expr
->value
);
1574 sval
= sval_from_fval(expr
, expr
->fvalue
);
1577 handle_preop_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1580 get_rl_sval(expr
->unop
, implied
, recurse_cnt
, &rl
, &sval
);
1583 handle_binop_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1586 handle_comparison_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1589 handle_logical_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1591 case EXPR_PTRSIZEOF
:
1593 sval
= handle_sizeof(expr
);
1596 case EXPR_CONDITIONAL
:
1597 handle_conditional_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1600 handle_call_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1603 if (get_mtag_sval(expr
, &sval
))
1605 if (implied
== RL_EXACT
)
1607 rl
= alloc_rl(valid_ptr_min_sval
, valid_ptr_max_sval
);
1610 handle_offsetof_rl(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1613 evaluate_expression(expr
);
1614 if (expr
->type
== EXPR_VALUE
)
1615 sval
= sval_from_val(expr
, expr
->value
);
1618 handle_variable(expr
, implied
, recurse_cnt
, &rl
, &sval
);
1622 if (rl
== (void *)-1UL)
1625 if (sval
.type
|| (rl
&& rl_to_sval(rl
, &sval
))) {
1629 if (implied
== RL_EXACT
)
1636 if (type
&& (implied
== RL_ABSOLUTE
|| implied
== RL_REAL_ABSOLUTE
) &&
1637 !custom_handle_variable
) {
1638 *res
= alloc_whole_rl(type
);
1644 static bool get_rl_internal(struct expression
*expr
, int implied
, int *recurse_cnt
, struct range_list
**res
)
1646 struct range_list
*rl
= NULL
;
1649 if (!get_rl_sval(expr
, implied
, recurse_cnt
, &rl
, &sval
))
1653 *res
= alloc_rl(sval
, sval
);
1659 static bool get_rl_helper(struct expression
*expr
, int implied
, struct range_list
**res
)
1661 struct range_list
*rl
= NULL
;
1663 int recurse_cnt
= 0;
1665 if (get_value(expr
, &sval
)) {
1666 if (implied
== RL_HARD
) {
1667 if (sval
.uvalue
== INT_MAX
||
1668 sval
.uvalue
== UINT_MAX
||
1669 sval
.uvalue
== LONG_MAX
||
1670 sval
.uvalue
== ULONG_MAX
)
1673 *res
= alloc_rl(sval
, sval
);
1677 if (!get_rl_sval(expr
, implied
, &recurse_cnt
, &rl
, &sval
))
1681 *res
= alloc_rl(sval
, sval
);
1688 struct expression
*expr
;
1690 } cached_results
[24];
1691 static int cache_idx
;
1693 void clear_math_cache(void)
1695 memset(cached_results
, 0, sizeof(cached_results
));
1698 void set_fast_math_only(void)
1703 void clear_fast_math_only(void)
1709 * Don't cache EXPR_VALUE because values are fast already.
1712 static bool get_value_literal(struct expression
*expr
, sval_t
*res_sval
)
1714 struct expression
*tmp
;
1715 int recurse_cnt
= 0;
1717 tmp
= strip_expr(expr
);
1718 if (!tmp
|| tmp
->type
!= EXPR_VALUE
)
1721 return get_rl_sval(expr
, RL_EXACT
, &recurse_cnt
, NULL
, res_sval
);
1724 /* returns 1 if it can get a value literal or else returns 0 */
1725 int get_value(struct expression
*expr
, sval_t
*res_sval
)
1727 struct range_list
*(*orig_custom_fn
)(struct expression
*expr
);
1728 int recurse_cnt
= 0;
1732 if (get_value_literal(expr
, res_sval
))
1736 * This only handles RL_EXACT because other expr statements can be
1737 * different at different points. Like the list iterator, for example.
1739 for (i
= 0; i
< ARRAY_SIZE(cached_results
); i
++) {
1740 if (expr
== cached_results
[i
].expr
) {
1741 if (cached_results
[i
].sval
.type
) {
1742 *res_sval
= cached_results
[i
].sval
;
1749 orig_custom_fn
= custom_handle_variable
;
1750 custom_handle_variable
= NULL
;
1751 get_rl_sval(expr
, RL_EXACT
, &recurse_cnt
, NULL
, &sval
);
1753 custom_handle_variable
= orig_custom_fn
;
1755 cached_results
[cache_idx
].expr
= expr
;
1756 cached_results
[cache_idx
].sval
= sval
;
1757 cache_idx
= (cache_idx
+ 1) % ARRAY_SIZE(cached_results
);
1766 static bool get_implied_value_internal(struct expression
*expr
, int *recurse_cnt
, sval_t
*res_sval
)
1768 struct range_list
*rl
;
1770 res_sval
->type
= NULL
;
1772 if (!get_rl_sval(expr
, RL_IMPLIED
, recurse_cnt
, &rl
, res_sval
))
1774 if (!res_sval
->type
&& !rl_to_sval(rl
, res_sval
))
1779 int get_implied_value(struct expression
*expr
, sval_t
*sval
)
1781 struct range_list
*rl
;
1783 if (!get_rl_helper(expr
, RL_IMPLIED
, &rl
) ||
1784 !rl_to_sval(rl
, sval
))
1789 int get_implied_value_fast(struct expression
*expr
, sval_t
*sval
)
1791 struct range_list
*rl
;
1799 set_fast_math_only();
1800 if (get_rl_helper(expr
, RL_IMPLIED
, &rl
) &&
1801 rl_to_sval(rl
, sval
))
1803 clear_fast_math_only();
1809 int get_implied_min(struct expression
*expr
, sval_t
*sval
)
1811 struct range_list
*rl
;
1813 if (!get_rl_helper(expr
, RL_IMPLIED
, &rl
) || !rl
)
1819 int get_implied_max(struct expression
*expr
, sval_t
*sval
)
1821 struct range_list
*rl
;
1823 if (!get_rl_helper(expr
, RL_IMPLIED
, &rl
) || !rl
)
1829 int get_implied_rl(struct expression
*expr
, struct range_list
**rl
)
1831 if (!get_rl_helper(expr
, RL_IMPLIED
, rl
) || !*rl
)
1836 static int get_absolute_rl_internal(struct expression
*expr
, struct range_list
**rl
, int *recurse_cnt
)
1839 get_rl_internal(expr
, RL_ABSOLUTE
, recurse_cnt
, rl
);
1841 *rl
= alloc_whole_rl(get_type(expr
));
1845 int get_absolute_rl(struct expression
*expr
, struct range_list
**rl
)
1848 get_rl_helper(expr
, RL_ABSOLUTE
, rl
);
1850 *rl
= alloc_whole_rl(get_type(expr
));
1854 int get_real_absolute_rl(struct expression
*expr
, struct range_list
**rl
)
1857 get_rl_helper(expr
, RL_REAL_ABSOLUTE
, rl
);
1859 *rl
= alloc_whole_rl(get_type(expr
));
1863 int custom_get_absolute_rl(struct expression
*expr
,
1864 struct range_list
*(*fn
)(struct expression
*expr
),
1865 struct range_list
**rl
)
1870 custom_handle_variable
= fn
;
1871 ret
= get_rl_helper(expr
, RL_REAL_ABSOLUTE
, rl
);
1872 custom_handle_variable
= NULL
;
1876 int get_implied_rl_var_sym(const char *var
, struct symbol
*sym
, struct range_list
**rl
)
1878 struct smatch_state
*state
;
1880 state
= get_state(SMATCH_EXTRA
, var
, sym
);
1881 *rl
= estate_rl(state
);
1887 int get_hard_max(struct expression
*expr
, sval_t
*sval
)
1889 struct range_list
*rl
;
1891 if (!get_rl_helper(expr
, RL_HARD
, &rl
) || !rl
)
1897 int get_fuzzy_min(struct expression
*expr
, sval_t
*sval
)
1899 struct range_list
*rl
;
1902 if (!get_rl_helper(expr
, RL_FUZZY
, &rl
) || !rl
)
1905 if (sval_is_negative(tmp
) && sval_is_min(tmp
))
1911 int get_fuzzy_max(struct expression
*expr
, sval_t
*sval
)
1913 struct range_list
*rl
;
1916 if (!get_rl_helper(expr
, RL_FUZZY
, &rl
) || !rl
)
1919 if (max
.uvalue
> INT_MAX
- 10000)
1925 int get_absolute_min(struct expression
*expr
, sval_t
*sval
)
1927 struct range_list
*rl
;
1928 struct symbol
*type
;
1930 type
= get_type(expr
);
1932 type
= &llong_ctype
; // FIXME: this is wrong but places assume get type can't fail.
1934 get_rl_helper(expr
, RL_REAL_ABSOLUTE
, &rl
);
1938 *sval
= sval_type_min(type
);
1940 if (sval_cmp(*sval
, sval_type_min(type
)) < 0)
1941 *sval
= sval_type_min(type
);
1945 int get_absolute_max(struct expression
*expr
, sval_t
*sval
)
1947 struct range_list
*rl
;
1948 struct symbol
*type
;
1950 type
= get_type(expr
);
1952 type
= &llong_ctype
;
1954 get_rl_helper(expr
, RL_REAL_ABSOLUTE
, &rl
);
1958 *sval
= sval_type_max(type
);
1960 if (sval_cmp(sval_type_max(type
), *sval
) < 0)
1961 *sval
= sval_type_max(type
);
1965 int known_condition_true(struct expression
*expr
)
1972 if (__inline_fn
&& get_param_num(expr
) >= 0) {
1973 if (get_implied_value(expr
, &tmp
) && tmp
.value
)
1978 if (get_value(expr
, &tmp
) && tmp
.value
)
1984 int known_condition_false(struct expression
*expr
)
1991 if (__inline_fn
&& get_param_num(expr
) >= 0) {
1992 if (get_implied_value(expr
, &tmp
) && tmp
.value
== 0)
1997 if (expr_is_zero(expr
))
2003 int implied_condition_true(struct expression
*expr
)
2010 if (known_condition_true(expr
))
2012 if (get_implied_value(expr
, &tmp
) && tmp
.value
)
2015 if (expr
->type
== EXPR_POSTOP
)
2016 return implied_condition_true(expr
->unop
);
2018 if (expr
->type
== EXPR_PREOP
&& expr
->op
== SPECIAL_DECREMENT
)
2019 return implied_not_equal(expr
->unop
, 1);
2020 if (expr
->type
== EXPR_PREOP
&& expr
->op
== SPECIAL_INCREMENT
)
2021 return implied_not_equal(expr
->unop
, -1);
2023 expr
= strip_expr(expr
);
2024 switch (expr
->type
) {
2026 if (do_comparison(expr
) == 1)
2030 if (expr
->op
== '!') {
2031 if (implied_condition_false(expr
->unop
))
2037 if (implied_not_equal(expr
, 0) == 1)
2044 int implied_condition_false(struct expression
*expr
)
2046 struct expression
*tmp
;
2052 if (known_condition_false(expr
))
2055 switch (expr
->type
) {
2057 if (do_comparison(expr
) == 2)
2060 if (expr
->op
== '!') {
2061 if (implied_condition_true(expr
->unop
))
2065 tmp
= strip_expr(expr
);
2067 return implied_condition_false(tmp
);
2070 if (get_implied_value(expr
, &sval
) && sval
.value
== 0)