//===-- tsan_dense_alloc.h --------------------------------------*- C++ -*-===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file is a part of ThreadSanitizer (TSan), a race detector.
//
// A DenseSlabAlloc is a freelist-based allocator of fixed-size objects.
// DenseSlabAllocCache is a thread-local cache for DenseSlabAlloc.
// The only difference from traditional slab allocators is that DenseSlabAlloc
// allocates/frees indices of objects and provides functionality to map
// an index onto the real pointer. The index is u32, that is, 2 times smaller
// than uptr (hence the Dense prefix).
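//
// A minimal usage sketch (illustrative only: "Node", the size constants and
// the variable names below are hypothetical and not part of this header;
// the object type must satisfy sizeof(T) >= sizeof(u32)):
//
//   typedef DenseSlabAlloc<Node, 1 << 10, 1 << 10> NodeAlloc;
//   NodeAlloc alloc;
//   DenseSlabAllocCache cache;
//   alloc.InitCache(&cache);
//   NodeAlloc::IndexT idx = alloc.Alloc(&cache);  // u32 index, not a pointer
//   Node *n = alloc.Map(idx);                     // map the index to a pointer
//   alloc.Free(&cache, idx);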
//===----------------------------------------------------------------------===//
#ifndef TSAN_DENSE_ALLOC_H
#define TSAN_DENSE_ALLOC_H

#include "sanitizer_common/sanitizer_common.h"
#include "tsan_defs.h"
#include "tsan_mutex.h"

namespace __tsan {

class DenseSlabAllocCache {
  static const uptr kSize = 128;
  typedef u32 IndexT;
  uptr pos;
  IndexT cache[kSize];
  template <typename T, uptr kL1Size, uptr kL2Size> friend class DenseSlabAlloc;
};

template <typename T, uptr kL1Size, uptr kL2Size>
class DenseSlabAlloc {
 public:
  typedef DenseSlabAllocCache Cache;
  typedef typename Cache::IndexT IndexT;

  DenseSlabAlloc() {
    // Check that kL1Size and kL2Size are sane.
    CHECK_EQ(kL1Size & (kL1Size - 1), 0);
    CHECK_EQ(kL2Size & (kL2Size - 1), 0);
    CHECK_GE(1ull << (sizeof(IndexT) * 8), kL1Size * kL2Size);
    // Check that it makes sense to use the dense alloc.
    CHECK_GE(sizeof(T), sizeof(IndexT));
    internal_memset(map_, 0, sizeof(map_));
    freelist_ = 0;
    fillpos_ = 0;
  }
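
  // Unmap every L2 block that was actually allocated; map_ slots that are
  // still null were never filled by Refill.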
  ~DenseSlabAlloc() {
    for (uptr i = 0; i < kL1Size; i++) {
      if (map_[i] != 0)
        UnmapOrDie(map_[i], kL2Size * sizeof(T));
    }
  }
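
  // Returns a free object index, taking it from the thread-local cache and
  // refilling the cache from the shared freelist when it runs empty.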
  IndexT Alloc(Cache *c) {
    if (c->pos == 0)
      Refill(c);
    return c->cache[--c->pos];
  }
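
  // Returns an index to the thread-local cache; when the cache is full,
  // half of it is drained back to the shared freelist.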
  void Free(Cache *c, IndexT idx) {
    if (c->pos == Cache::kSize)
      Drain(c);
    c->cache[c->pos++] = idx;
  }
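
  // Translates an index into the real pointer: the high bits select the L1
  // slot, the low bits the object within that L2 block.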
  T *Map(IndexT idx) {
    DCHECK_LE(idx, kL1Size * kL2Size);
    return &map_[idx / kL2Size][idx % kL2Size];
  }
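
  // Pushes everything left in the cache back onto the shared freelist.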
  void FlushCache(Cache *c) {
    SpinMutexLock lock(&mtx_);
    while (c->pos) {
      IndexT idx = c->cache[--c->pos];
      *(IndexT*)Map(idx) = freelist_;
      freelist_ = idx;
    }
  }
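
  // Resets a cache to the empty state (pos = 0, all slots zeroed); call it
  // before the cache is first used.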
  void InitCache(Cache *c) {
    c->pos = 0;
    internal_memset(c->cache, 0, sizeof(c->cache));
  }

 private:
  T *map_[kL1Size];
  SpinMutex mtx_;
  IndexT freelist_;
  uptr fillpos_;
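
  // Grabs up to kSize/2 indices from the shared freelist into the cache.
  // If the freelist is empty, maps a fresh L2 block of kL2Size objects and
  // threads the freelist through the objects' memory; index 0 stays reserved
  // as the invalid index.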
  void Refill(Cache *c) {
    SpinMutexLock lock(&mtx_);
    if (freelist_ == 0) {
      if (fillpos_ == kL1Size) {
        Printf("ThreadSanitizer: DenseSlabAllocator overflow. Dying.\n");
        Die();
      }
      T *batch = (T*)MmapOrDie(kL2Size * sizeof(T), "DenseSlabAllocator");
      // Reserve 0 as invalid index.
      IndexT start = fillpos_ == 0 ? 1 : 0;
      for (IndexT i = start; i < kL2Size; i++) {
        *(IndexT*)(batch + i) = i + 1 + fillpos_ * kL2Size;
      }
      *(IndexT*)(batch + kL2Size - 1) = 0;
      freelist_ = fillpos_ * kL2Size + start;
      map_[fillpos_++] = batch;
    }
    for (uptr i = 0; i < Cache::kSize / 2 && freelist_ != 0; i++) {
      IndexT idx = freelist_;
      c->cache[c->pos++] = idx;
      freelist_ = *(IndexT*)Map(idx);
    }
  }
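
  // Returns half of the cache back to the shared freelist to make room for
  // newly freed indices.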
  void Drain(Cache *c) {
    SpinMutexLock lock(&mtx_);
    for (uptr i = 0; i < Cache::kSize / 2; i++) {
      IndexT idx = c->cache[--c->pos];
      *(IndexT*)Map(idx) = freelist_;
      freelist_ = idx;
    }
  }
};

} // namespace __tsan

#endif // TSAN_DENSE_ALLOC_H