l3codeca.acm: Avoid mpg123 functions with suffix.
[wine.git] / dlls / concrt140 / details.c
blob72bb744744c06994a9a59ff27f9e547f36d268b8
1 /*
2 * Copyright 2010 Piotr Caban for CodeWeavers
4 * This library is free software; you can redistribute it and/or
5 * modify it under the terms of the GNU Lesser General Public
6 * License as published by the Free Software Foundation; either
7 * version 2.1 of the License, or (at your option) any later version.
9 * This library is distributed in the hope that it will be useful,
10 * but WITHOUT ANY WARRANTY; without even the implied warranty of
11 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
12 * Lesser General Public License for more details.
14 * You should have received a copy of the GNU Lesser General Public
15 * License along with this library; if not, write to the Free Software
16 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
19 /* Keep in sync with msvcp90/detail.c */
21 #include <stdarg.h>
23 #include "wine/debug.h"
24 #include "wine/exception.h"
25 #include "details.h"
27 WINE_DEFAULT_DEBUG_CHANNEL(msvcp);
/* Singly-linked page of queue slots; data[] is a variable-length payload
 * holding alloc_count items of item_size bytes each. */
typedef struct _Page
{
    struct _Page *_Next;
    size_t _Mask;   /* bit i set when slot i holds a successfully constructed item */
    char data[1];   /* actual size is chosen by the _Allocate_page vtable call */
} _Page;

/* One of the QUEUES_NO sub-queues that items are striped across. */
typedef struct
{
    LONG lock;        /* spin lock protecting head/tail: 0 = free, 1 = held */
    _Page *head;
    _Page *tail;
    size_t head_pos;  /* next ticket to pop from this sub-queue */
    size_t tail_pos;  /* next ticket to push to this sub-queue */
} threadsafe_queue;

#define QUEUES_NO 8
typedef struct
{
    size_t tail_pos;  /* global push ticket counter */
    size_t head_pos;  /* global pop ticket counter */
    threadsafe_queue queues[QUEUES_NO];
} queue_data;

/* C-side view of Concurrency::details::_Concurrent_queue_base_v4. */
typedef struct
{
    const vtable_ptr *vtable;
    queue_data *data; /* queue_data structure is not binary compatible */
    size_t alloc_count;  /* items per page; always a power of 2 */
    size_t item_size;    /* element size in bytes */
} _Concurrent_queue_base_v4;

extern const vtable_ptr _Concurrent_queue_base_v4_vtable;
/* Helpers dispatching into the derived (application-provided) C++ vtable;
 * the offsets identify the item copy/move/consume and page alloc/free slots. */
#define call__Concurrent_queue_base_v4__Move_item(this,dst,idx,src) CALL_VTBL_FUNC(this, \
        0, void, (_Concurrent_queue_base_v4*,_Page*,size_t,void*), (this,dst,idx,src))
#define call__Concurrent_queue_base_v4__Copy_item(this,dst,idx,src) CALL_VTBL_FUNC(this, \
        4, void, (_Concurrent_queue_base_v4*,_Page*,size_t,const void*), (this,dst,idx,src))
#define call__Concurrent_queue_base_v4__Assign_and_destroy_item(this,dst,src,idx) CALL_VTBL_FUNC(this, \
        8, void, (_Concurrent_queue_base_v4*,void*,_Page*,size_t), (this,dst,src,idx))
#define call__Concurrent_queue_base_v4__Allocate_page(this) CALL_VTBL_FUNC(this, \
        16, _Page*, (_Concurrent_queue_base_v4*), (this))
#define call__Concurrent_queue_base_v4__Deallocate_page(this, page) CALL_VTBL_FUNC(this, \
        20, void, (_Concurrent_queue_base_v4*,_Page*), (this,page))
/* ?_Internal_throw_exception@_Concurrent_queue_base_v4@details@Concurrency@@IBEXXZ */
/* ?_Internal_throw_exception@_Concurrent_queue_base_v4@details@Concurrency@@IEBAXXZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_throw_exception, 4)
/* Raises the queue's allocation-failure exception (bad_alloc via _Xmem);
 * does not return normally. */
void __thiscall _Concurrent_queue_base_v4__Internal_throw_exception(
        const _Concurrent_queue_base_v4 *this)
{
    TRACE("(%p)\n", this);
    _Xmem();
}
/* ??0_Concurrent_queue_base_v4@details@Concurrency@@IAE@I@Z */
/* ??0_Concurrent_queue_base_v4@details@Concurrency@@IEAA@_K@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4_ctor, 8)
/* Constructor: size is the element size in bytes.  Allocates the internal
 * (non-binary-compatible) queue_data bookkeeping, zero-initialized. */
_Concurrent_queue_base_v4* __thiscall _Concurrent_queue_base_v4_ctor(
        _Concurrent_queue_base_v4 *this, size_t size)
{
    TRACE("(%p %Iu)\n", this, size);

    this->data = operator_new(sizeof(*this->data));
    memset(this->data, 0, sizeof(*this->data));

    this->vtable = &_Concurrent_queue_base_v4_vtable;
    this->item_size = size;

    /* alloc_count needs to be power of 2 */
    /* Larger elements get fewer slots per page, keeping page payloads at
     * roughly 256 bytes; items over 128 bytes get one slot per page. */
    this->alloc_count =
        size <= 8 ? 32 :
        size <= 16 ? 16 :
        size <= 32 ? 8 :
        size <= 64 ? 4 :
        size <= 128 ? 2 : 1;
    return this;
}
/* ??1_Concurrent_queue_base_v4@details@Concurrency@@MAE@XZ */
/* ??1_Concurrent_queue_base_v4@details@Concurrency@@MEAA@XZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4_dtor, 4)
/* Destructor: frees only the internal bookkeeping; pages are expected to
 * have been released already (see _Internal_finish_clear). */
void __thiscall _Concurrent_queue_base_v4_dtor(_Concurrent_queue_base_v4 *this)
{
    TRACE("(%p)\n", this);
    operator_delete(this->data);
}
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4_vector_dtor, 8)
/* MSVC scalar/vector deleting destructor.
 * flags bit 0: also free the object's memory; bit 1: array form. */
_Concurrent_queue_base_v4* __thiscall _Concurrent_queue_base_v4_vector_dtor(
        _Concurrent_queue_base_v4 *this, unsigned int flags)
{
    TRACE("(%p %x)\n", this, flags);
    if(flags & 2) {
        /* we have an array, with the number of elements stored before the first object */
        INT_PTR i, *ptr = (INT_PTR *)this-1;

        /* destroy in reverse construction order, then free from the count cookie */
        for(i=*ptr-1; i>=0; i--)
            _Concurrent_queue_base_v4_dtor(this+i);
        operator_delete(ptr);
    } else {
        if(flags & 1)
            _Concurrent_queue_base_v4_dtor(this);
        operator_delete(this);
    }

    return this;
}
/* ?_Internal_finish_clear@_Concurrent_queue_base_v4@details@Concurrency@@IAEXXZ */
/* ?_Internal_finish_clear@_Concurrent_queue_base_v4@details@Concurrency@@IEAAXXZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_finish_clear, 4)
/* Releases the remaining page of each sub-queue.  Called by the derived
 * class after all items have been popped.
 * NOTE(review): only the tail page is freed — this assumes each sub-queue
 * holds at most one page (head == tail) once the queue is empty; confirm. */
void __thiscall _Concurrent_queue_base_v4__Internal_finish_clear(
        _Concurrent_queue_base_v4 *this)
{
    int i;

    TRACE("(%p)\n", this);

    for(i=0; i<QUEUES_NO; i++)
    {
        if(this->data->queues[i].tail)
            call__Concurrent_queue_base_v4__Deallocate_page(this, this->data->queues[i].tail);
    }
}
154 /* ?_Internal_empty@_Concurrent_queue_base_v4@details@Concurrency@@IBE_NXZ */
155 /* ?_Internal_empty@_Concurrent_queue_base_v4@details@Concurrency@@IEBA_NXZ */
156 DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_empty, 4)
157 bool __thiscall _Concurrent_queue_base_v4__Internal_empty(
158 const _Concurrent_queue_base_v4 *this)
160 TRACE("(%p)\n", this);
161 return this->data->head_pos == this->data->tail_pos;
164 /* ?_Internal_size@_Concurrent_queue_base_v4@details@Concurrency@@IBEIXZ */
165 /* ?_Internal_size@_Concurrent_queue_base_v4@details@Concurrency@@IEBA_KXZ */
166 DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_size, 4)
167 size_t __thiscall _Concurrent_queue_base_v4__Internal_size(
168 const _Concurrent_queue_base_v4 *this)
170 TRACE("(%p)\n", this);
171 return this->data->tail_pos - this->data->head_pos;
/* Busy-wait helper: spins up to a machine-dependent limit, then yields the
 * rest of the time slice with Sleep(0).  On uniprocessor machines the limit
 * is 0, so every call yields immediately.  The lazy initialization of
 * spin_limit is racy but idempotent (all threads compute the same value). */
static void spin_wait(int *counter)
{
    static int spin_limit = -1;

    if(spin_limit == -1)
    {
        SYSTEM_INFO si;
        GetSystemInfo(&si);
        spin_limit = si.dwNumberOfProcessors>1 ? 4000 : 0;
    }

    if(*counter >= spin_limit)
    {
        *counter = 0;
        Sleep(0);
    }
    else
    {
        (*counter)++;
    }
}
#ifdef _WIN64
/* Atomically increments a size_t via a compare-exchange loop; returns the
 * incremented (new) value. */
static size_t InterlockedIncrementSizeT(size_t volatile *dest)
{
    size_t v;

    do
    {
        v = *dest;
    } while(InterlockedCompareExchange64((LONGLONG*)dest, v+1, v) != v);

    return v+1;
}
#else
/* On 32-bit, size_t has LONG width so the plain interlocked increment suffices. */
#define InterlockedIncrementSizeT(dest) InterlockedIncrement((LONG*)dest)
#endif
/* __FINALLY_CTX handler for threadsafe_queue_push: publish the slot by
 * advancing the sub-queue's tail ticket even if copying the item threw,
 * so later pushes waiting on tail_pos are not blocked forever. */
static void CALLBACK queue_push_finally(BOOL normal, void *ctx)
{
    threadsafe_queue *queue = ctx;
    InterlockedIncrementSizeT(&queue->tail_pos);
}
/* Stores item e at ticket id of a sub-queue.  Waits until all earlier pushes
 * to this sub-queue have published their slots, allocates a fresh page when
 * id is the first slot of a page, then copies or moves the item in through
 * the derived class's vtable.  tail_pos is advanced by the __FINALLY_CTX
 * handler even on exception. */
static void threadsafe_queue_push(threadsafe_queue *queue, size_t id,
        void *e, _Concurrent_queue_base_v4 *parent, BOOL copy)
{
    size_t page_id = id & ~(parent->alloc_count-1);  /* ticket of the page's first slot */
    int spin;
    _Page *p;

    /* wait for our turn: earlier tickets must publish first */
    spin = 0;
    while(queue->tail_pos != id)
        spin_wait(&spin);

    if(page_id == id)
    {
        /* first slot of a new page: allocate and link it under the spin lock */
        /* TODO: Add exception handling */
        p = call__Concurrent_queue_base_v4__Allocate_page(parent);
        p->_Next = NULL;
        p->_Mask = 0;

        spin = 0;
        while(InterlockedCompareExchange(&queue->lock, 1, 0))
            spin_wait(&spin);
        if(queue->tail)
            queue->tail->_Next = p;
        queue->tail = p;
        if(!queue->head)
            queue->head = p;
        queue->lock = 0;
    }
    else
    {
        p = queue->tail;
    }

    __TRY
    {
        if(copy)
            call__Concurrent_queue_base_v4__Copy_item(parent, p, id-page_id, e);
        else
            call__Concurrent_queue_base_v4__Move_item(parent, p, id-page_id, e);
        /* mark the slot as holding a constructed item */
        p->_Mask |= 1 << (id - page_id);
    }
    __FINALLY_CTX(queue_push_finally, queue);
}
/* Removes ticket id's item from the sub-queue into e.  Returns FALSE when
 * the slot's mask bit is clear (the matching push failed while constructing
 * the item).  Unlinks and frees the page once its last slot is consumed. */
static BOOL threadsafe_queue_pop(threadsafe_queue *queue, size_t id,
        void *e, _Concurrent_queue_base_v4 *parent)
{
    size_t page_id = id & ~(parent->alloc_count-1);
    int spin;
    _Page *p;
    BOOL ret = FALSE;

    /* wait for the matching push to publish its slot */
    spin = 0;
    while(queue->tail_pos <= id)
        spin_wait(&spin);

    /* wait for earlier pops on this sub-queue to finish */
    spin = 0;
    while(queue->head_pos != id)
        spin_wait(&spin);

    p = queue->head;
    if(p->_Mask & (1 << (id-page_id)))
    {
        /* TODO: Add exception handling */
        call__Concurrent_queue_base_v4__Assign_and_destroy_item(parent, e, p, id-page_id);
        ret = TRUE;
    }

    if(id == page_id+parent->alloc_count-1)
    {
        /* last slot of the page: unlink under the spin lock, then free */
        spin = 0;
        while(InterlockedCompareExchange(&queue->lock, 1, 0))
            spin_wait(&spin);
        queue->head = p->_Next;
        if(!queue->head)
            queue->tail = NULL;
        queue->lock = 0;

        /* TODO: Add exception handling */
        call__Concurrent_queue_base_v4__Deallocate_page(parent, p);
    }

    /* publish the pop so the next ticket can proceed */
    InterlockedIncrementSizeT(&queue->head_pos);
    return ret;
}
/* ?_Internal_push@_Concurrent_queue_base_v4@details@Concurrency@@IAEXPBX@Z */
/* ?_Internal_push@_Concurrent_queue_base_v4@details@Concurrency@@IEAAXPEBX@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_push, 8)
/* Copy-pushes *e: takes a global push ticket, then stripes it across the
 * QUEUES_NO sub-queues (ticket modulo selects the queue, quotient is the
 * per-queue ticket). */
void __thiscall _Concurrent_queue_base_v4__Internal_push(
        _Concurrent_queue_base_v4 *this, void *e)
{
    size_t id;

    TRACE("(%p %p)\n", this, e);

    id = InterlockedIncrementSizeT(&this->data->tail_pos)-1;
    threadsafe_queue_push(this->data->queues + id % QUEUES_NO,
            id / QUEUES_NO, e, this, TRUE);
}
/* ?_Internal_move_push@_Concurrent_queue_base_v4@details@Concurrency@@IAEXPAX@Z */
/* ?_Internal_move_push@_Concurrent_queue_base_v4@details@Concurrency@@IEAAXPEAX@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_move_push, 8)
/* Same as _Internal_push but moves from *e (dispatches _Move_item instead
 * of _Copy_item). */
void __thiscall _Concurrent_queue_base_v4__Internal_move_push(
        _Concurrent_queue_base_v4 *this, void *e)
{
    size_t id;

    TRACE("(%p %p)\n", this, e);

    id = InterlockedIncrementSizeT(&this->data->tail_pos)-1;
    threadsafe_queue_push(this->data->queues + id % QUEUES_NO,
            id / QUEUES_NO, e, this, FALSE);
}
/* ?_Internal_pop_if_present@_Concurrent_queue_base_v4@details@Concurrency@@IAE_NPAX@Z */
/* ?_Internal_pop_if_present@_Concurrent_queue_base_v4@details@Concurrency@@IEAA_NPEAX@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_pop_if_present, 8)
/* Non-blocking pop: claims a pop ticket with a CAS on head_pos (returning
 * FALSE when the queue is empty), then retries with fresh tickets while the
 * claimed slot turns out to hold no constructed item (its push had failed). */
bool __thiscall _Concurrent_queue_base_v4__Internal_pop_if_present(
        _Concurrent_queue_base_v4 *this, void *e)
{
    size_t id;

    TRACE("(%p %p)\n", this, e);

    do
    {
        do
        {
            id = this->data->head_pos;
            if(id == this->data->tail_pos) return FALSE;
        } while(InterlockedCompareExchangePointer((void**)&this->data->head_pos,
                    (void*)(id+1), (void*)id) != (void*)id);
    } while(!threadsafe_queue_pop(this->data->queues + id % QUEUES_NO,
                id / QUEUES_NO, e, this));
    return TRUE;
}
/* ?_Internal_swap@_Concurrent_queue_base_v4@details@Concurrency@@IAEXAAV123@@Z */
/* ?_Internal_swap@_Concurrent_queue_base_v4@details@Concurrency@@IEAAXAEAV123@@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_swap, 8)
/* Stub: swapping the contents of two concurrent queues is not implemented. */
void __thiscall _Concurrent_queue_base_v4__Internal_swap(
        _Concurrent_queue_base_v4 *this, _Concurrent_queue_base_v4 *r)
{
    FIXME("(%p %p) stub\n", this, r);
}
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4_dummy, 4)
/* Placeholder for vtable slots that are pure virtual in the base class and
 * must be overridden by the derived class; should never be reached. */
void __thiscall _Concurrent_queue_base_v4_dummy(_Concurrent_queue_base_v4 *this)
{
    ERR("unexpected call\n");
}

DEFINE_RTTI_DATA0(_Concurrent_queue_base_v4, 0, ".?AV_Concurrent_queue_base_v4@details@Concurrency@@")
/* Counter used to hand out auto-generated ids to default-constructed
 * _Runtime_objects; advanced by 2 per object so auto ids stay even. */
static LONG _Runtime_object_id;

typedef struct
{
    const vtable_ptr *vtable;
    int id;
} _Runtime_object;

extern const vtable_ptr _Runtime_object_vtable;
/* ??0_Runtime_object@details@Concurrency@@QAE@H@Z */
/* ??0_Runtime_object@details@Concurrency@@QEAA@H@Z */
DEFINE_THISCALL_WRAPPER(_Runtime_object_ctor_id, 8)
/* Constructor taking an explicit id. */
_Runtime_object* __thiscall _Runtime_object_ctor_id(_Runtime_object *this, int id)
{
    TRACE("(%p %d)\n", this, id);
    this->vtable = &_Runtime_object_vtable;
    this->id = id;
    return this;
}
/* ??0_Runtime_object@details@Concurrency@@QAE@XZ */
/* ??0_Runtime_object@details@Concurrency@@QEAA@XZ */
DEFINE_THISCALL_WRAPPER(_Runtime_object_ctor, 4)
/* Default constructor: assigns the next auto-generated id.  The shared
 * counter advances by 2, so auto-generated ids are always even. */
_Runtime_object* __thiscall _Runtime_object_ctor(_Runtime_object *this)
{
    TRACE("(%p)\n", this);
    this->vtable = &_Runtime_object_vtable;
    this->id = InterlockedExchangeAdd(&_Runtime_object_id, 2);
    return this;
}
DEFINE_THISCALL_WRAPPER(_Runtime_object__GetId, 4)
/* Returns the object's id (virtual _GetId implementation). */
int __thiscall _Runtime_object__GetId(_Runtime_object *this)
{
    TRACE("(%p)\n", this);
    return this->id;
}

DEFINE_RTTI_DATA0(_Runtime_object, 0, ".?AV_Runtime_object@details@Concurrency@@")
/* C-side view of Concurrency::details::_Concurrent_vector_base_v4.
 * Elements live in power-of-two sized segments: segment i holds 2^i
 * elements (segment 0 effectively holds 2 via special-casing elsewhere). */
typedef struct __Concurrent_vector_base_v4
{
    void* (__cdecl *allocator)(struct __Concurrent_vector_base_v4 *, size_t);
    void *storage[3];    /* inline segment table used until it overflows */
    size_t first_block;  /* number of segments backed by the first allocation */
    size_t early_size;   /* element count, updated with interlocked operations */
    void **segment;      /* points at storage or at a heap-allocated table */
} _Concurrent_vector_base_v4;

#define STORAGE_SIZE ARRAY_SIZE(this->storage)
#define SEGMENT_SIZE (sizeof(void*) * 8)

/* Layout _Internal_compact uses to hand the superseded segment pointers
 * back to the caller so the derived class can free them. */
typedef struct compact_block
{
    size_t first_block;
    void *blocks[SEGMENT_SIZE];
    int size_check;
}compact_block;
/* Return the integer base-2 logarithm of (x|1). Result is 0 for x == 0. */
static inline unsigned int log2i(unsigned int x)
{
    ULONG index;
    BitScanReverse(&index, x|1);  /* |1 guarantees a set bit, so index is always written */
    return index;
}
/* ?_Segment_index_of@_Concurrent_vector_base_v4@details@Concurrency@@KAII@Z */
/* ?_Segment_index_of@_Concurrent_vector_base_v4@details@Concurrency@@KA_K_K@Z */
/* Maps an element index to the segment that contains it: floor(log2(x)). */
size_t __cdecl _vector_base_v4__Segment_index_of(size_t x)
{
    unsigned int half;

    TRACE("(%Iu)\n", x);

    /* log2i operates on 32-bit values; handle the upper half separately
     * when size_t is 64-bit (the shift is only evaluated in that case) */
    if((sizeof(x) == 8) && (half = x >> 32))
        return log2i(half) + 32;

    return log2i(x);
}
/* ?_Internal_throw_exception@_Concurrent_vector_base_v4@details@Concurrency@@IBEXI@Z */
/* ?_Internal_throw_exception@_Concurrent_vector_base_v4@details@Concurrency@@IEBAX_K@Z */
DEFINE_THISCALL_WRAPPER(_vector_base_v4__Internal_throw_exception, 8)
/* Throws the exception selected by idx.  The helpers raise C++ exceptions
 * and do not return, so the switch cases need no break statements. */
void __thiscall _vector_base_v4__Internal_throw_exception(void/*_vector_base_v4*/ *this, size_t idx)
{
    TRACE("(%p %Iu)\n", this, idx);

    switch(idx) {
    case 0: _Xout_of_range("Index out of range");
    case 1: _Xout_of_range("Index out of segments table range");
    case 2: throw_range_error("Index is inside segment which failed to be allocated");
    }
}
#ifdef _WIN64
#define InterlockedCompareExchangeSizeT(dest, exchange, cmp) InterlockedCompareExchangeSize((size_t *)dest, (size_t)exchange, (size_t)cmp)
/* Compare-exchange on a 64-bit size_t; returns the previous value. */
static size_t InterlockedCompareExchangeSize(size_t volatile *dest, size_t exchange, size_t cmp)
{
    size_t v;

    v = InterlockedCompareExchange64((LONGLONG*)dest, exchange, cmp);

    return v;
}
#else
/* On 32-bit, size_t has LONG width so the plain compare-exchange suffices. */
#define InterlockedCompareExchangeSizeT(dest, exchange, cmp) InterlockedCompareExchange((LONG*)dest, (size_t)exchange, (size_t)cmp)
#endif

/* Sentinel stored in a segment-table slot while a thread is allocating it. */
#define SEGMENT_ALLOC_MARKER ((void*)1)
/* Ensures this->segment[seg] points at allocated storage.  Threads race via
 * SEGMENT_ALLOC_MARKER: the CAS winner allocates while losers spin until the
 * pointer becomes real.  Segments below first_block alias into segment 0's
 * single allocation.  On allocator failure the slot is reset to NULL and the
 * exception is rethrown; a NULL result throws range error 2. */
static void concurrent_vector_alloc_segment(_Concurrent_vector_base_v4 *this,
        size_t seg, size_t element_size)
{
    int spin;

    while(!this->segment[seg] || this->segment[seg] == SEGMENT_ALLOC_MARKER)
    {
        spin = 0;
        while(this->segment[seg] == SEGMENT_ALLOC_MARKER)
            spin_wait(&spin);

        if(!InterlockedCompareExchangeSizeT((this->segment + seg),
                    SEGMENT_ALLOC_MARKER, 0))
        {
            __TRY
            {
                if(seg == 0)
                    this->segment[seg] = this->allocator(this, element_size * (1 << this->first_block));
                else if(seg < this->first_block)
                    this->segment[seg] = (BYTE**)this->segment[0]
                            + element_size * (1 << seg);
                else
                    this->segment[seg] = this->allocator(this, element_size * (1 << seg));
            }
            __EXCEPT_ALL
            {
                this->segment[seg] = NULL;
                _CxxThrowException(NULL, NULL);
            }
            __ENDTRY
            if(!this->segment[seg])
                _vector_base_v4__Internal_throw_exception(this, 2);
        }
    }
}
/* ??1_Concurrent_vector_base_v4@details@Concurrency@@IAE@XZ */
/* ??1_Concurrent_vector_base_v4@details@Concurrency@@IEAA@XZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4_dtor, 4)
/* Destructor: frees the heap-allocated segment table if it replaced the
 * inline one.  The segments themselves are released by the derived class
 * (via _Internal_clear / _Internal_compact) before this runs. */
void __thiscall _Concurrent_vector_base_v4_dtor(
        _Concurrent_vector_base_v4 *this)
{
    TRACE("(%p)\n", this);

    if(this->segment != this->storage)
        free(this->segment);
}
/* ?_Internal_capacity@_Concurrent_vector_base_v4@details@Concurrency@@IBEIXZ */
/* ?_Internal_capacity@_Concurrent_vector_base_v4@details@Concurrency@@IEBA_KXZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_capacity, 4)
/* Capacity = 2^i where i is the first segment without storage (0 when even
 * segment 0 is unallocated).
 * NOTE(review): 1 << i is int arithmetic — truncates/overflows once i >= 31;
 * confirm against native behaviour for very large vectors. */
size_t __thiscall _Concurrent_vector_base_v4__Internal_capacity(
        const _Concurrent_vector_base_v4 *this)
{
    size_t last_block;
    int i;

    TRACE("(%p)\n", this);

    last_block = (this->segment == this->storage ? STORAGE_SIZE : SEGMENT_SIZE);
    for(i = 0; i < last_block; i++)
    {
        if(!this->segment[i])
            return !i ? 0 : 1 << i;
    }

    return 1 << i;
}
/* ?_Internal_reserve@_Concurrent_vector_base_v4@details@Concurrency@@IAEXIII@Z */
/* ?_Internal_reserve@_Concurrent_vector_base_v4@details@Concurrency@@IEAAX_K00@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_reserve, 16)
/* Grows capacity to at least size elements: allocates all segments up to
 * block_idx, switching from the inline 3-entry table to a heap-allocated
 * SEGMENT_SIZE-entry table when more segments are needed.  Throws range
 * error 0 when size exceeds max_size. */
void __thiscall _Concurrent_vector_base_v4__Internal_reserve(
        _Concurrent_vector_base_v4 *this, size_t size,
        size_t element_size, size_t max_size)
{
    size_t block_idx, capacity;
    int i;
    void **new_segment;

    TRACE("(%p %Iu %Iu %Iu)\n", this, size, element_size, max_size);

    if(size > max_size) _vector_base_v4__Internal_throw_exception(this, 0);
    capacity = _Concurrent_vector_base_v4__Internal_capacity(this);
    if(size <= capacity) return;
    block_idx = _vector_base_v4__Segment_index_of(size - 1);
    /* first reservation decides how many segments the first allocation spans */
    if(!this->first_block)
        InterlockedCompareExchangeSizeT(&this->first_block, block_idx + 1, 0);
    i = _vector_base_v4__Segment_index_of(capacity);
    if(this->storage == this->segment) {
        for(; i <= block_idx && i < STORAGE_SIZE; i++)
            concurrent_vector_alloc_segment(this, i, element_size);
        if(block_idx >= STORAGE_SIZE) {
            /* inline table overflowed: publish a heap table with CAS;
             * the loser of the race frees its copy */
            new_segment = malloc(SEGMENT_SIZE * sizeof(void*));
            if(new_segment == NULL) _vector_base_v4__Internal_throw_exception(this, 2);
            memset(new_segment, 0, SEGMENT_SIZE * sizeof(*new_segment));
            memcpy(new_segment, this->storage, STORAGE_SIZE * sizeof(*new_segment));
            if(InterlockedCompareExchangePointer((void*)&this->segment, new_segment,
                        this->storage) != this->storage)
                free(new_segment);
        }
    }
    for(; i <= block_idx; i++)
        concurrent_vector_alloc_segment(this, i, element_size);
}
/* ?_Internal_clear@_Concurrent_vector_base_v4@details@Concurrency@@IAEIP6AXPAXI@Z@Z */
/* ?_Internal_clear@_Concurrent_vector_base_v4@details@Concurrency@@IEAA_KP6AXPEAX_K@Z@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_clear, 8)
/* Destroys all elements (via the clear callback), working from the highest
 * occupied segment down, and returns the number of segments that still have
 * storage allocated (so the derived class can free them). */
size_t __thiscall _Concurrent_vector_base_v4__Internal_clear(
        _Concurrent_vector_base_v4 *this, void (__cdecl *clear)(void*, size_t))
{
    size_t seg_no, elems;
    int i;

    TRACE("(%p %p)\n", this, clear);

    seg_no = this->early_size ? _vector_base_v4__Segment_index_of(this->early_size) + 1 : 0;
    for(i = seg_no - 1; i >= 0; i--) {
        elems = this->early_size - (1 << i & ~1);  /* elements living in segment i */
        clear(this->segment[i], elems);
        this->early_size -= elems;
    }
    /* count trailing segments that are allocated beyond the cleared ones */
    while(seg_no < (this->segment == this->storage ? STORAGE_SIZE : SEGMENT_SIZE)) {
        if(!this->segment[seg_no]) break;
        seg_no++;
    }
    return seg_no;
}
/* ?_Internal_compact@_Concurrent_vector_base_v4@details@Concurrency@@IAEPAXIPAXP6AX0I@ZP6AX0PBXI@Z@Z */
/* ?_Internal_compact@_Concurrent_vector_base_v4@details@Concurrency@@IEAAPEAX_KPEAXP6AX10@ZP6AX1PEBX0@Z@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_compact, 20)
/* Shrinks the vector to the minimal number of segments.  Superseded segment
 * pointers are handed back to the caller through v (a compact_block) so the
 * derived class can free them; returns NULL when nothing had to change. */
void * __thiscall _Concurrent_vector_base_v4__Internal_compact(
        _Concurrent_vector_base_v4 *this, size_t element_size, void *v,
        void (__cdecl *clear)(void*, size_t),
        void (__cdecl *copy)(void*, const void*, size_t))
{
    compact_block *b;
    size_t size, alloc_size, seg_no, alloc_seg, copy_element, clear_element;
    int i;

    TRACE("(%p %Iu %p %p %p)\n", this, element_size, v, clear, copy);

    size = this->early_size;
    alloc_size = _Concurrent_vector_base_v4__Internal_capacity(this);
    if(alloc_size == 0) return NULL;
    alloc_seg = _vector_base_v4__Segment_index_of(alloc_size - 1);
    if(!size) {
        /* empty vector: hand every segment back for the caller to free */
        this->first_block = 0;
        b = v;
        b->first_block = alloc_seg + 1;
        memset(b->blocks, 0, sizeof(b->blocks));
        memcpy(b->blocks, this->segment,
                (alloc_seg + 1) * sizeof(this->segment[0]));
        memset(this->segment, 0, sizeof(this->segment[0]) * (alloc_seg + 1));
        return v;
    }
    seg_no = _vector_base_v4__Segment_index_of(size - 1);
    /* already compact: one first block covering exactly the used segments */
    if(this->first_block == (seg_no + 1) && seg_no == alloc_seg) return NULL;
    b = v;
    b->first_block = this->first_block;
    memset(b->blocks, 0, sizeof(b->blocks));
    memcpy(b->blocks, this->segment,
            (alloc_seg + 1) * sizeof(this->segment[0]));
    if(this->first_block == (seg_no + 1) && seg_no != alloc_seg) {
        /* only trailing unused segments need freeing; keep the used ones */
        memset(b->blocks, 0, sizeof(b->blocks[0]) * (seg_no + 1));
        memset(&this->segment[seg_no + 1], 0, sizeof(this->segment[0]) * (alloc_seg - seg_no));
        return v;
    }
    /* general case: reallocate into one contiguous first block, copy the
     * elements over, then destroy the originals in the old segments */
    memset(this->segment, 0,
            (alloc_seg + 1) * sizeof(this->segment[0]));
    this->first_block = 0;
    _Concurrent_vector_base_v4__Internal_reserve(this, size, element_size,
            ~(size_t)0 / element_size);
    for(i = 0; i < seg_no; i++)
        copy(this->segment[i], b->blocks[i], i ? 1 << i : 2);
    copy_element = size - ((1 << seg_no) & ~1);
    if(copy_element > 0)
        copy(this->segment[seg_no], b->blocks[seg_no], copy_element);
    for(i = 0; i < seg_no; i++)
        clear(b->blocks[i], i ? 1 << i : 2);
    clear_element = size - ((1 << seg_no) & ~1);
    if(clear_element > 0)
        clear(b->blocks[seg_no], clear_element);
    return v;
}
/* ?_Internal_copy@_Concurrent_vector_base_v4@details@Concurrency@@IAEXABV123@IP6AXPAXPBXI@Z@Z */
/* ?_Internal_copy@_Concurrent_vector_base_v4@details@Concurrency@@IEAAXAEBV123@_KP6AXPEAXPEBX1@Z@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_copy, 16)
/* Copy-constructs this vector's elements from v using the copy callback;
 * assumes this vector starts out empty. */
void __thiscall _Concurrent_vector_base_v4__Internal_copy(
        _Concurrent_vector_base_v4 *this, const _Concurrent_vector_base_v4 *v,
        size_t element_size, void (__cdecl *copy)(void*, const void*, size_t))
{
    size_t seg_no, v_size;
    int i;

    TRACE("(%p %p %Iu %p)\n", this, v, element_size, copy);

    v_size = v->early_size;
    if(!v_size) {
        this->early_size = 0;
        return;
    }
    _Concurrent_vector_base_v4__Internal_reserve(this, v_size,
            element_size, ~(size_t)0 / element_size);
    seg_no = _vector_base_v4__Segment_index_of(v_size - 1);
    /* full segments first (segment 0 holds 2, segment i holds 2^i),
     * then the partially filled last segment */
    for(i = 0; i < seg_no; i++)
        copy(this->segment[i], v->segment[i], i ? 1 << i : 2);
    copy(this->segment[i], v->segment[i], v_size - (1 << i & ~1));
    this->early_size = v_size;
}
/* ?_Internal_assign@_Concurrent_vector_base_v4@details@Concurrency@@IAEXABV123@IP6AXPAXI@ZP6AX1PBXI@Z4@Z */
/* ?_Internal_assign@_Concurrent_vector_base_v4@details@Concurrency@@IEAAXAEBV123@_KP6AXPEAX1@ZP6AX2PEBX1@Z5@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_assign, 24)
/* Assigns v's contents to this vector: elements present in both are
 * assign()ed over, surplus elements are clear()ed (destroyed), and missing
 * ones are copy()-constructed. */
void __thiscall _Concurrent_vector_base_v4__Internal_assign(
        _Concurrent_vector_base_v4 *this, const _Concurrent_vector_base_v4 *v,
        size_t element_size, void (__cdecl *clear)(void*, size_t),
        void (__cdecl *assign)(void*, const void*, size_t),
        void (__cdecl *copy)(void*, const void*, size_t))
{
    size_t v_size, seg_no, v_seg_no, remain_element;
    int i;

    TRACE("(%p %p %Iu %p %p %p)\n", this, v, element_size, clear, assign, copy);

    v_size = v->early_size;
    if(!v_size) {
        _Concurrent_vector_base_v4__Internal_clear(this, clear);
        return;
    }
    if(!this->early_size) {
        _Concurrent_vector_base_v4__Internal_copy(this, v, element_size, copy);
        return;
    }
    seg_no = _vector_base_v4__Segment_index_of(this->early_size - 1);
    v_seg_no = _vector_base_v4__Segment_index_of(v_size - 1);

    /* assign over the segments both vectors fully occupy, then the shared
     * remainder in the first segment where they diverge */
    for(i = 0; i < min(seg_no, v_seg_no); i++)
        assign(this->segment[i], v->segment[i], i ? 1 << i : 2);
    remain_element = min(this->early_size, v_size) - (1 << i & ~1);
    if(remain_element != 0)
        assign(this->segment[i], v->segment[i], remain_element);

    if(this->early_size > v_size)
    {
        /* shrinking: destroy the surplus elements */
        if((i ? 1 << i : 2) - remain_element > 0)
            clear((BYTE**)this->segment[i] + element_size * remain_element,
                    (i ? 1 << i : 2) - remain_element);
        if(i < seg_no)
        {
            for(i++; i < seg_no; i++)
                clear(this->segment[i], 1 << i);
            clear(this->segment[i], this->early_size - (1 << i));
        }
    }
    else if(this->early_size < v_size)
    {
        /* growing: copy-construct the extra elements */
        if((i ? 1 << i : 2) - remain_element > 0)
            copy((BYTE**)this->segment[i] + element_size * remain_element,
                    (BYTE**)v->segment[i] + element_size * remain_element,
                    (i ? 1 << i : 2) - remain_element);
        if(i < v_seg_no)
        {
            _Concurrent_vector_base_v4__Internal_reserve(this, v_size,
                    element_size, ~(size_t)0 / element_size);
            for(i++; i < v_seg_no; i++)
                copy(this->segment[i], v->segment[i], 1 << i);
            copy(this->segment[i], v->segment[i], v->early_size - (1 << i));
        }
    }
    this->early_size = v_size;
}
/* ?_Internal_grow_by@_Concurrent_vector_base_v4@details@Concurrency@@IAEIIIP6AXPAXPBXI@Z1@Z */
/* ?_Internal_grow_by@_Concurrent_vector_base_v4@details@Concurrency@@IEAA_K_K0P6AXPEAXPEBX0@Z2@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_grow_by, 20)
/* Appends count elements copy-constructed from v; reserves capacity and
 * claims the index range with a CAS on early_size, so concurrent growers
 * each get a disjoint range.  Returns the size before this call's growth. */
size_t __thiscall _Concurrent_vector_base_v4__Internal_grow_by(
        _Concurrent_vector_base_v4 *this, size_t count, size_t element_size,
        void (__cdecl *copy)(void*, const void*, size_t), const void *v)
{
    size_t size, seg_no, last_seg_no, remain_size;

    TRACE("(%p %Iu %Iu %p %p)\n", this, count, element_size, copy, v);

    if(count == 0) return this->early_size;
    do {
        size = this->early_size;
        _Concurrent_vector_base_v4__Internal_reserve(this, size + count, element_size,
                ~(size_t)0 / element_size);
    } while(InterlockedCompareExchangeSizeT(&this->early_size, size + count, size) != size);

    /* initialize the claimed range: rest of the current segment, the full
     * middle segments, then the partial last segment */
    seg_no = size ? _vector_base_v4__Segment_index_of(size - 1) : 0;
    last_seg_no = _vector_base_v4__Segment_index_of(size + count - 1);
    remain_size = min(size + count, 1 << (seg_no + 1)) - size;
    if(remain_size > 0)
        copy(((BYTE**)this->segment[seg_no] + element_size * (size - ((1 << seg_no) & ~1))), v,
                remain_size);
    if(seg_no != last_seg_no)
    {
        for(seg_no++; seg_no < last_seg_no; seg_no++)
            copy(this->segment[seg_no], v, 1 << seg_no);
        copy(this->segment[last_seg_no], v, size + count - (1 << last_seg_no));
    }
    return size;
}
/* ?_Internal_grow_to_at_least_with_result@_Concurrent_vector_base_v4@details@Concurrency@@IAEIIIP6AXPAXPBXI@Z1@Z */
/* ?_Internal_grow_to_at_least_with_result@_Concurrent_vector_base_v4@details@Concurrency@@IEAA_K_K0P6AXPEAXPEBX0@Z2@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_grow_to_at_least_with_result, 20)
/* Grows the vector to at least count elements, copy-constructing the new
 * elements from v.  Returns the size observed before growing (also when
 * another thread already grew it far enough). */
size_t __thiscall _Concurrent_vector_base_v4__Internal_grow_to_at_least_with_result(
        _Concurrent_vector_base_v4 *this, size_t count, size_t element_size,
        void (__cdecl *copy)(void*, const void*, size_t), const void *v)
{
    size_t size, seg_no, last_seg_no, remain_size;

    TRACE("(%p %Iu %Iu %p %p)\n", this, count, element_size, copy, v);

    _Concurrent_vector_base_v4__Internal_reserve(this, count, element_size,
            ~(size_t)0 / element_size);
    do {
        size = this->early_size;
        if(size >= count) return size;  /* someone else grew it far enough */
    } while(InterlockedCompareExchangeSizeT(&this->early_size, count, size) != size);

    /* initialize the tail: rest of the current segment, the full middle
     * segments, then the partial last segment */
    seg_no = size ? _vector_base_v4__Segment_index_of(size - 1) : 0;
    last_seg_no = _vector_base_v4__Segment_index_of(count - 1);
    remain_size = min(count, 1 << (seg_no + 1)) - size;
    if(remain_size > 0)
        copy(((BYTE**)this->segment[seg_no] + element_size * (size - ((1 << seg_no) & ~1))), v,
                remain_size);
    if(seg_no != last_seg_no)
    {
        for(seg_no++; seg_no < last_seg_no; seg_no++)
            copy(this->segment[seg_no], v, 1 << seg_no);
        copy(this->segment[last_seg_no], v, count - (1 << last_seg_no));
    }
    return size;
}
/* ?_Internal_push_back@_Concurrent_vector_base_v4@details@Concurrency@@IAEPAXIAAI@Z */
/* ?_Internal_push_back@_Concurrent_vector_base_v4@details@Concurrency@@IEAAPEAX_KAEA_K@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_push_back, 12)
/* Claims one new slot at the end of the vector (CAS on early_size after
 * reserving capacity), stores its index in *idx, and returns the address
 * where the caller constructs the element. */
void * __thiscall _Concurrent_vector_base_v4__Internal_push_back(
        _Concurrent_vector_base_v4 *this, size_t element_size, size_t *idx)
{
    size_t index, seg, segment_base;
    void *data;

    TRACE("(%p %Iu %p)\n", this, element_size, idx);

    do {
        index = this->early_size;
        _Concurrent_vector_base_v4__Internal_reserve(this, index + 1,
                element_size, ~(size_t)0 / element_size);
    } while(InterlockedCompareExchangeSizeT(&this->early_size, index + 1, index) != index);
    /* translate the global index into segment + offset (segment i starts at 2^i) */
    seg = _vector_base_v4__Segment_index_of(index);
    segment_base = (seg == 0) ? 0 : (1 << seg);
    data = (BYTE*)this->segment[seg] + element_size * (index - segment_base);
    *idx = index;

    return data;
}
/* ?_Internal_resize@_Concurrent_vector_base_v4@details@Concurrency@@IAEXIIIP6AXPAXI@ZP6AX0PBXI@Z2@Z */
/* ?_Internal_resize@_Concurrent_vector_base_v4@details@Concurrency@@IEAAX_K00P6AXPEAX0@ZP6AX1PEBX0@Z3@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_resize, 28)
/* Resizes to exactly resize elements: grows via grow_to_at_least (copying
 * from v), clears completely for resize == 0, or destroys the trailing
 * elements segment by segment when shrinking.  Throws range error 0 when
 * resize exceeds max_size. */
void __thiscall _Concurrent_vector_base_v4__Internal_resize(
        _Concurrent_vector_base_v4 *this, size_t resize, size_t element_size,
        size_t max_size, void (__cdecl *clear)(void*, size_t),
        void (__cdecl *copy)(void*, const void*, size_t), const void *v)
{
    size_t size, seg_no, end_seg_no, clear_element;

    TRACE("(%p %Iu %Iu %Iu %p %p %p)\n", this, resize, element_size, max_size, clear, copy, v);

    if(resize > max_size) _vector_base_v4__Internal_throw_exception(this, 0);
    size = this->early_size;
    if(resize > size)
        _Concurrent_vector_base_v4__Internal_grow_to_at_least_with_result(this,
                resize, element_size, copy, v);
    else if(resize == 0)
        _Concurrent_vector_base_v4__Internal_clear(this, clear);
    else if(resize < size)
    {
        /* destroy the partial tail of the last segment, then whole middle
         * segments, then the tail of the segment resize ends in */
        seg_no = _vector_base_v4__Segment_index_of(size - 1);
        end_seg_no = _vector_base_v4__Segment_index_of(resize - 1);
        clear_element = size - (seg_no ? 1 << seg_no : 2);
        if(clear_element > 0)
            clear(this->segment[seg_no], clear_element);
        if(seg_no) seg_no--;
        for(; seg_no > end_seg_no; seg_no--)
            clear(this->segment[seg_no], 1 << seg_no);
        clear_element = (1 << (end_seg_no + 1)) - resize;
        if(clear_element > 0)
            clear((BYTE**)this->segment[end_seg_no] + element_size * (resize - ((1 << end_seg_no) & ~1)),
                    clear_element);
        this->early_size = resize;
    }
}
/* ?_Internal_swap@_Concurrent_vector_base_v4@details@Concurrency@@IAEXAAV123@@Z */
/* ?_Internal_swap@_Concurrent_vector_base_v4@details@Concurrency@@IEAAXAEAV123@@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_swap, 8)
/* Swaps the two vectors by structure copy.  The inline storage arrays are
 * copied along with everything else, so a segment pointer that aimed at the
 * other object's inline table must be re-pointed at the new owner's own
 * storage afterwards. */
void __thiscall _Concurrent_vector_base_v4__Internal_swap(
        _Concurrent_vector_base_v4 *this, _Concurrent_vector_base_v4 *v)
{
    _Concurrent_vector_base_v4 temp;

    TRACE("(%p %p)\n", this, v);

    temp = *this;
    *this = *v;
    *v = temp;
    if(v->segment == this->storage)
        v->segment = v->storage;
    if(this->segment == v->storage)
        this->segment = this->storage;
}
/* ?is_current_task_group_canceling@Concurrency@@YA_NXZ */
/* Forwards to the scheduler context: TRUE when the current task collection
 * is being canceled. */
bool __cdecl is_current_task_group_canceling(void)
{
    return Context_IsCurrentTaskCollectionCanceling();
}
/* ?_GetCombinableSize@details@Concurrency@@YAIXZ */
/* ?_GetCombinableSize@details@Concurrency@@YA_KXZ */
/* Semi-stub returning a fixed bucket count for combinable<> objects.
 * NOTE(review): presumably 11 matches what native returns by default —
 * confirm whether native sizes this by hardware concurrency. */
size_t __cdecl _GetCombinableSize(void)
{
    FIXME("() stub\n");
    return 11;
}
/* Layout-compatible stand-in for Concurrency::task_continuation_context;
 * field meanings are not fully known (hence the unk* names). */
typedef struct {
    void *unk0;
    BYTE unk1;
} task_continuation_context;
/* ??0task_continuation_context@Concurrency@@AAE@XZ */
/* ??0task_continuation_context@Concurrency@@AEAA@XZ */
DEFINE_THISCALL_WRAPPER(task_continuation_context_ctor, 4)
/* Default constructor: zero-initializes the whole (partially understood)
 * structure, padding included. */
task_continuation_context* __thiscall task_continuation_context_ctor(task_continuation_context *this)
{
    TRACE("(%p)\n", this);
    memset(this, 0, sizeof(*this));
    return this;
}
/* Binary layout of a std::function<void __cdecl()> as passed by value from
 * native code (see the mangled names on _CallInContext below).  Only the
 * vtable and the stored object pointer are actually used; the remaining
 * members are opaque inline storage. */
typedef struct {
    const vtable_ptr *vtable;
    void (__cdecl *func)(void);  /* only used for tracing */
    int unk[4];                  /* opaque; purpose unknown */
    void *unk2[3];               /* opaque; purpose unknown */
    void *this;                  /* callable object; may point inside this struct (inline storage) */
} function_void_cdecl_void;
/* ?_Assign@_ContextCallback@details@Concurrency@@AAEXPAX@Z */
/* ?_Assign@_ContextCallback@details@Concurrency@@AEAAXPEAX@Z */
DEFINE_THISCALL_WRAPPER(_ContextCallback__Assign, 8)
/* No-op: this implementation does not track the callback context, so
 * there is nothing to assign (cf. _Capture/_Reset below). */
void __thiscall _ContextCallback__Assign(void *this, void *v)
{
    TRACE("(%p %p)\n", this, v);
}
/* std::function vtable slots: operator() and the cleanup entry.
 * NOTE(review): byte offsets 8/16 assume pointer-sized slots — confirm
 * they match the native std::function vtable layout on both archs. */
#define call_function_do_call(this) CALL_VTBL_FUNC(this, 8, void, (function_void_cdecl_void*), (this))
#define call_function_do_clean(this,b) CALL_VTBL_FUNC(this, 16, void, (function_void_cdecl_void*,bool), (this, b))
/* ?_CallInContext@_ContextCallback@details@Concurrency@@QBEXV?$function@$$A6AXXZ@std@@_N@Z */
/* ?_CallInContext@_ContextCallback@details@Concurrency@@QEBAXV?$function@$$A6AXXZ@std@@_N@Z */
DEFINE_THISCALL_WRAPPER(_ContextCallback__CallInContext, 48)
/* Invoke the passed std::function immediately in the current context (no
 * marshalling to a captured context is performed), then run its cleanup
 * slot.  The bool handed to cleanup says whether the callable lives
 * outside the function object itself (i.e. not in its inline storage). */
void __thiscall _ContextCallback__CallInContext(const void *this, function_void_cdecl_void func, bool b)
{
    TRACE("(%p %p %x)\n", this, func.func, b);
    call_function_do_call(func.this);
    call_function_do_clean(func.this, func.this!=&func);
}
/* ?_Capture@_ContextCallback@details@Concurrency@@AAEXXZ */
/* ?_Capture@_ContextCallback@details@Concurrency@@AEAAXXZ */
DEFINE_THISCALL_WRAPPER(_ContextCallback__Capture, 4)
/* No-op: the current context is not captured by this implementation. */
void __thiscall _ContextCallback__Capture(void *this)
{
    TRACE("(%p)\n", this);
}
/* ?_Reset@_ContextCallback@details@Concurrency@@AAEXXZ */
/* ?_Reset@_ContextCallback@details@Concurrency@@AEAAXXZ */
DEFINE_THISCALL_WRAPPER(_ContextCallback__Reset, 4)
/* No-op: nothing is captured by _Capture, so there is nothing to reset. */
void __thiscall _ContextCallback__Reset(void *this)
{
    TRACE("(%p)\n", this);
}
/* ?_IsCurrentOriginSTA@_ContextCallback@details@Concurrency@@CA_NXZ */
/* Stub: always reports FALSE, i.e. the origin is never treated as a
 * single-threaded COM apartment. */
bool __cdecl _ContextCallback__IsCurrentOriginSTA(void *this)
{
    TRACE("(%p)\n", this);
    return FALSE;
}
/* Per-task event logger; all of its _Log* methods below are trace-only
 * stubs, so the bookkeeping fields are never updated here. */
typedef struct {
    /*_Task_impl_base*/void *task;  /* owning task implementation */
    bool scheduled;                 /* unused by the stubs below */
    bool started;                   /* unused by the stubs below */
} _TaskEventLogger;
/* ?_LogCancelTask@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogCancelTask@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogCancelTask, 4)
/* Stub: task cancellation is only traced, no event is emitted. */
void __thiscall _TaskEventLogger__LogCancelTask(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}
/* ?_LogScheduleTask@_TaskEventLogger@details@Concurrency@@QAEX_N@Z */
/* ?_LogScheduleTask@_TaskEventLogger@details@Concurrency@@QEAAX_N@Z */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogScheduleTask, 8)
/* Stub: task scheduling is only traced, no event is emitted. */
void __thiscall _TaskEventLogger__LogScheduleTask(_TaskEventLogger *this, bool continuation)
{
    TRACE("(%p %x)\n", this, continuation);
}
/* ?_LogTaskCompleted@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogTaskCompleted@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogTaskCompleted, 4)
/* Stub: task completion is only traced, no event is emitted. */
void __thiscall _TaskEventLogger__LogTaskCompleted(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}
/* ?_LogTaskExecutionCompleted@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogTaskExecutionCompleted@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogTaskExecutionCompleted, 4)
/* Stub: end of task execution is only traced, no event is emitted. */
void __thiscall _TaskEventLogger__LogTaskExecutionCompleted(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}
/* ?_LogWorkItemCompleted@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogWorkItemCompleted@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogWorkItemCompleted, 4)
/* Stub: work-item completion is only traced, no event is emitted. */
void __thiscall _TaskEventLogger__LogWorkItemCompleted(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}
/* ?_LogWorkItemStarted@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogWorkItemStarted@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogWorkItemStarted, 4)
/* Stub: work-item start is only traced, no event is emitted. */
void __thiscall _TaskEventLogger__LogWorkItemStarted(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}
/* One unit of work submitted to the Windows thread pool. */
typedef struct {
    PTP_WORK work;                   /* thread-pool work object; NULL until _Schedule_chore succeeds */
    void (__cdecl *callback)(void*); /* client routine to run; may be NULL (skipped by the callback) */
    void *arg;                       /* opaque argument forwarded to callback */
} _Threadpool_chore;
/* ?_Reschedule_chore@details@Concurrency@@YAHPBU_Threadpool_chore@12@@Z */
/* ?_Reschedule_chore@details@Concurrency@@YAHPEBU_Threadpool_chore@12@@Z */
/* Re-submit an already created chore to the thread pool.  The chore's work
 * object must have been created by _Schedule_chore.  Always returns 0. */
int __cdecl _Reschedule_chore(const _Threadpool_chore *chore)
{
    TRACE("(%p)\n", chore);

    SubmitThreadpoolWork(chore->work);
    return 0;
}
1054 static void WINAPI threadpool_callback(PTP_CALLBACK_INSTANCE instance, void *context, PTP_WORK work)
1056 _Threadpool_chore *chore = context;
1057 TRACE("calling chore callback: %p\n", chore);
1058 if (chore->callback)
1059 chore->callback(chore->arg);
1062 /* ?_Schedule_chore@details@Concurrency@@YAHPAU_Threadpool_chore@12@@Z */
1063 /* ?_Schedule_chore@details@Concurrency@@YAHPEAU_Threadpool_chore@12@@Z */
1064 int __cdecl _Schedule_chore(_Threadpool_chore *chore)
1066 TRACE("(%p)\n", chore);
1068 chore->work = CreateThreadpoolWork(threadpool_callback, chore, NULL);
1069 /* FIXME: what should be returned in case of error */
1070 if(!chore->work)
1071 return -1;
1073 return _Reschedule_chore(chore);
1076 /* ?_Release_chore@details@Concurrency@@YAXPAU_Threadpool_chore@12@@Z */
1077 /* ?_Release_chore@details@Concurrency@@YAXPEAU_Threadpool_chore@12@@Z */
1078 void __cdecl _Release_chore(_Threadpool_chore *chore)
1080 TRACE("(%p)\n", chore);
1082 if(!chore->work) return;
1083 CloseThreadpoolWork(chore->work);
1084 chore->work = NULL;
/* ?_IsNonBlockingThread@_Task_impl_base@details@Concurrency@@SA_NXZ */
/* Stub: always claims the current thread may block. */
bool __cdecl _Task_impl_base__IsNonBlockingThread(void)
{
    FIXME("() stub\n");
    return FALSE;
}
/* Vtables emitted as assembly thunks.  For _Concurrent_queue_base_v4 only
 * slot 3 (the vector-deleting destructor) has a real implementation; the
 * remaining slots (including _Move_item/_Copy_item at slots 0/1, see the
 * call macros above) are dummies. */
__ASM_BLOCK_BEGIN(concurrency_details_vtables)
    __ASM_VTABLE(_Concurrent_queue_base_v4,
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy)
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy)
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy)
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_vector_dtor)
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy)
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy));
    __ASM_VTABLE(_Runtime_object,
            VTABLE_ADD_FUNC(_Runtime_object__GetId));
__ASM_BLOCK_END
/* Initialize RTTI data for the classes above.  Only needed on 64-bit,
 * where the RTTI helpers require the module base address for fixups. */
void init_concurrency_details(void *base)
{
#ifdef __x86_64__
    init__Concurrent_queue_base_v4_rtti(base);
    init__Runtime_object_rtti(base);
#endif
}
/* ?_Byte_reverse_table@details@Concurrency@@3QBEB */
/* Lookup table: byte_reverse_table[i] is i with the order of its eight
 * bits reversed (e.g. 0x01 -> 0x80, 0x03 -> 0xc0). */
const BYTE byte_reverse_table[256] =
{
    0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0,
    0x08, 0x88, 0x48, 0xc8, 0x28, 0xa8, 0x68, 0xe8, 0x18, 0x98, 0x58, 0xd8, 0x38, 0xb8, 0x78, 0xf8,
    0x04, 0x84, 0x44, 0xc4, 0x24, 0xa4, 0x64, 0xe4, 0x14, 0x94, 0x54, 0xd4, 0x34, 0xb4, 0x74, 0xf4,
    0x0c, 0x8c, 0x4c, 0xcc, 0x2c, 0xac, 0x6c, 0xec, 0x1c, 0x9c, 0x5c, 0xdc, 0x3c, 0xbc, 0x7c, 0xfc,
    0x02, 0x82, 0x42, 0xc2, 0x22, 0xa2, 0x62, 0xe2, 0x12, 0x92, 0x52, 0xd2, 0x32, 0xb2, 0x72, 0xf2,
    0x0a, 0x8a, 0x4a, 0xca, 0x2a, 0xaa, 0x6a, 0xea, 0x1a, 0x9a, 0x5a, 0xda, 0x3a, 0xba, 0x7a, 0xfa,
    0x06, 0x86, 0x46, 0xc6, 0x26, 0xa6, 0x66, 0xe6, 0x16, 0x96, 0x56, 0xd6, 0x36, 0xb6, 0x76, 0xf6,
    0x0e, 0x8e, 0x4e, 0xce, 0x2e, 0xae, 0x6e, 0xee, 0x1e, 0x9e, 0x5e, 0xde, 0x3e, 0xbe, 0x7e, 0xfe,
    0x01, 0x81, 0x41, 0xc1, 0x21, 0xa1, 0x61, 0xe1, 0x11, 0x91, 0x51, 0xd1, 0x31, 0xb1, 0x71, 0xf1,
    0x09, 0x89, 0x49, 0xc9, 0x29, 0xa9, 0x69, 0xe9, 0x19, 0x99, 0x59, 0xd9, 0x39, 0xb9, 0x79, 0xf9,
    0x05, 0x85, 0x45, 0xc5, 0x25, 0xa5, 0x65, 0xe5, 0x15, 0x95, 0x55, 0xd5, 0x35, 0xb5, 0x75, 0xf5,
    0x0d, 0x8d, 0x4d, 0xcd, 0x2d, 0xad, 0x6d, 0xed, 0x1d, 0x9d, 0x5d, 0xdd, 0x3d, 0xbd, 0x7d, 0xfd,
    0x03, 0x83, 0x43, 0xc3, 0x23, 0xa3, 0x63, 0xe3, 0x13, 0x93, 0x53, 0xd3, 0x33, 0xb3, 0x73, 0xf3,
    0x0b, 0x8b, 0x4b, 0xcb, 0x2b, 0xab, 0x6b, 0xeb, 0x1b, 0x9b, 0x5b, 0xdb, 0x3b, 0xbb, 0x7b, 0xfb,
    0x07, 0x87, 0x47, 0xc7, 0x27, 0xa7, 0x67, 0xe7, 0x17, 0x97, 0x57, 0xd7, 0x37, 0xb7, 0x77, 0xf7,
    0x0f, 0x8f, 0x4f, 0xcf, 0x2f, 0xaf, 0x6f, 0xef, 0x1f, 0x9f, 0x5f, 0xdf, 0x3f, 0xbf, 0x7f, 0xff,