[wine.git] / dlls / msvcp90 / details.c
/*
 * Copyright 2010 Piotr Caban for CodeWeavers
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */

/* Keep in sync with concrt140/detail.c */

#include <stdarg.h>

#include "msvcp90.h"

#include "wine/debug.h"
#include "wine/exception.h"

#if _MSVCP_VER >= 100

WINE_DEFAULT_DEBUG_CHANNEL(msvcp);

typedef struct _Page
{
    struct _Page *_Next;
    size_t _Mask;
    char data[1];
} _Page;

typedef struct
{
    LONG lock;
    _Page *head;
    _Page *tail;
    size_t head_pos;
    size_t tail_pos;
} threadsafe_queue;

#define QUEUES_NO 8
typedef struct
{
    size_t tail_pos;
    size_t head_pos;
    threadsafe_queue queues[QUEUES_NO];
} queue_data;
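
/* Items are distributed round-robin over QUEUES_NO sub-queues, each with its
 * own lock and page list, to reduce contention; the queue-wide tail_pos and
 * head_pos counters hand out tickets that order producers and consumers
 * across the sub-queues. */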

typedef struct
{
    const vtable_ptr *vtable;
    queue_data *data; /* queue_data structure is not binary compatible */
    size_t alloc_count;
    size_t item_size;
} _Concurrent_queue_base_v4;

extern const vtable_ptr _Concurrent_queue_base_v4_vtable;
#if _MSVCP_VER == 100
#define call__Concurrent_queue_base_v4__Move_item call__Concurrent_queue_base_v4__Copy_item
#define call__Concurrent_queue_base_v4__Copy_item(this,dst,idx,src) CALL_VTBL_FUNC(this, \
        0, void, (_Concurrent_queue_base_v4*,_Page*,size_t,const void*), (this,dst,idx,src))
#define call__Concurrent_queue_base_v4__Assign_and_destroy_item(this,dst,src,idx) CALL_VTBL_FUNC(this, \
        4, void, (_Concurrent_queue_base_v4*,void*,_Page*,size_t), (this,dst,src,idx))
#define call__Concurrent_queue_base_v4__Allocate_page(this) CALL_VTBL_FUNC(this, \
        12, _Page*, (_Concurrent_queue_base_v4*), (this))
#define call__Concurrent_queue_base_v4__Deallocate_page(this, page) CALL_VTBL_FUNC(this, \
        16, void, (_Concurrent_queue_base_v4*,_Page*), (this,page))
#else
#define call__Concurrent_queue_base_v4__Move_item(this,dst,idx,src) CALL_VTBL_FUNC(this, \
        0, void, (_Concurrent_queue_base_v4*,_Page*,size_t,void*), (this,dst,idx,src))
#define call__Concurrent_queue_base_v4__Copy_item(this,dst,idx,src) CALL_VTBL_FUNC(this, \
        4, void, (_Concurrent_queue_base_v4*,_Page*,size_t,const void*), (this,dst,idx,src))
#define call__Concurrent_queue_base_v4__Assign_and_destroy_item(this,dst,src,idx) CALL_VTBL_FUNC(this, \
        8, void, (_Concurrent_queue_base_v4*,void*,_Page*,size_t), (this,dst,src,idx))
#define call__Concurrent_queue_base_v4__Allocate_page(this) CALL_VTBL_FUNC(this, \
        16, _Page*, (_Concurrent_queue_base_v4*), (this))
#define call__Concurrent_queue_base_v4__Deallocate_page(this, page) CALL_VTBL_FUNC(this, \
        20, void, (_Concurrent_queue_base_v4*,_Page*), (this,page))
#endif

/* ?_Internal_throw_exception@_Concurrent_queue_base_v4@details@Concurrency@@IBEXXZ */
/* ?_Internal_throw_exception@_Concurrent_queue_base_v4@details@Concurrency@@IEBAXXZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_throw_exception, 4)
void __thiscall _Concurrent_queue_base_v4__Internal_throw_exception(
        const _Concurrent_queue_base_v4 *this)
{
    TRACE("(%p)\n", this);
    _Xmem();
}

/* ??0_Concurrent_queue_base_v4@details@Concurrency@@IAE@I@Z */
/* ??0_Concurrent_queue_base_v4@details@Concurrency@@IEAA@_K@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4_ctor, 8)
_Concurrent_queue_base_v4* __thiscall _Concurrent_queue_base_v4_ctor(
        _Concurrent_queue_base_v4 *this, size_t size)
{
    TRACE("(%p %Iu)\n", this, size);

    this->data = operator_new(sizeof(*this->data));
    memset(this->data, 0, sizeof(*this->data));

    this->vtable = &_Concurrent_queue_base_v4_vtable;
    this->item_size = size;

    /* alloc_count needs to be power of 2 */
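    /* (it is also the number of items stored in one _Page: e.g. size == 12
     * gives alloc_count == 16, so a page carries at most 256 bytes of item
     * data; items larger than 128 bytes get a page of their own) */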
    this->alloc_count =
        size <= 8 ? 32 :
        size <= 16 ? 16 :
        size <= 32 ? 8 :
        size <= 64 ? 4 :
        size <= 128 ? 2 : 1;
    return this;
}

/* ??1_Concurrent_queue_base_v4@details@Concurrency@@MAE@XZ */
/* ??1_Concurrent_queue_base_v4@details@Concurrency@@MEAA@XZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4_dtor, 4)
void __thiscall _Concurrent_queue_base_v4_dtor(_Concurrent_queue_base_v4 *this)
{
    TRACE("(%p)\n", this);
    operator_delete(this->data);
}

DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4_vector_dtor, 8)
_Concurrent_queue_base_v4* __thiscall _Concurrent_queue_base_v4_vector_dtor(
        _Concurrent_queue_base_v4 *this, unsigned int flags)
{
    TRACE("(%p %x)\n", this, flags);
    if(flags & 2) {
        /* we have an array, with the number of elements stored before the first object */
        INT_PTR i, *ptr = (INT_PTR *)this-1;

        for(i=*ptr-1; i>=0; i--)
            _Concurrent_queue_base_v4_dtor(this+i);
        operator_delete(ptr);
    } else {
        if(flags & 1)
            _Concurrent_queue_base_v4_dtor(this);
        operator_delete(this);
    }

    return this;
}

/* ?_Internal_finish_clear@_Concurrent_queue_base_v4@details@Concurrency@@IAEXXZ */
/* ?_Internal_finish_clear@_Concurrent_queue_base_v4@details@Concurrency@@IEAAXXZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_finish_clear, 4)
void __thiscall _Concurrent_queue_base_v4__Internal_finish_clear(
        _Concurrent_queue_base_v4 *this)
{
    int i;

    TRACE("(%p)\n", this);

    for(i=0; i<QUEUES_NO; i++)
    {
        if(this->data->queues[i].tail)
            call__Concurrent_queue_base_v4__Deallocate_page(this, this->data->queues[i].tail);
    }
}

/* ?_Internal_empty@_Concurrent_queue_base_v4@details@Concurrency@@IBE_NXZ */
/* ?_Internal_empty@_Concurrent_queue_base_v4@details@Concurrency@@IEBA_NXZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_empty, 4)
bool __thiscall _Concurrent_queue_base_v4__Internal_empty(
        const _Concurrent_queue_base_v4 *this)
{
    TRACE("(%p)\n", this);
    return this->data->head_pos == this->data->tail_pos;
}

/* ?_Internal_size@_Concurrent_queue_base_v4@details@Concurrency@@IBEIXZ */
/* ?_Internal_size@_Concurrent_queue_base_v4@details@Concurrency@@IEBA_KXZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_size, 4)
size_t __thiscall _Concurrent_queue_base_v4__Internal_size(
        const _Concurrent_queue_base_v4 *this)
{
    TRACE("(%p)\n", this);
    return this->data->tail_pos - this->data->head_pos;
}
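
/* Busy-wait helper: spin up to spin_limit iterations (4000 on SMP machines,
 * 0 on uniprocessor ones, where spinning can't help) before yielding the
 * time slice with Sleep(0). */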
static void spin_wait(int *counter)
{
    static int spin_limit = -1;

    if(spin_limit == -1)
    {
        SYSTEM_INFO si;
        GetSystemInfo(&si);
        spin_limit = si.dwNumberOfProcessors>1 ? 4000 : 0;
    }

    if(*counter >= spin_limit)
    {
        *counter = 0;
        Sleep(0);
    }
    else
    {
        (*counter)++;
    }
}
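
/* A push first waits until its ticket (id) becomes the current tail of the
 * sub-queue, allocates a fresh page when id is the first slot of a page,
 * then copies or moves the item in and publishes the slot by setting its bit
 * in the page's _Mask; tail_pos is advanced even if the item's copy or move
 * throws (see queue_push_finally). */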
static void CALLBACK queue_push_finally(BOOL normal, void *ctx)
{
    threadsafe_queue *queue = ctx;
    InterlockedIncrementSizeT(&queue->tail_pos);
}

static void threadsafe_queue_push(threadsafe_queue *queue, size_t id,
        void *e, _Concurrent_queue_base_v4 *parent, BOOL copy)
{
    size_t page_id = id & ~(parent->alloc_count-1);
    int spin;
    _Page *p;

    spin = 0;
    while(queue->tail_pos != id)
        spin_wait(&spin);

    if(page_id == id)
    {
        /* TODO: Add exception handling */
        p = call__Concurrent_queue_base_v4__Allocate_page(parent);
        p->_Next = NULL;
        p->_Mask = 0;

        spin = 0;
        while(InterlockedCompareExchange(&queue->lock, 1, 0))
            spin_wait(&spin);
        if(queue->tail)
            queue->tail->_Next = p;
        queue->tail = p;
        if(!queue->head)
            queue->head = p;
        WriteRelease(&queue->lock, 0);
    }
    else
    {
        p = queue->tail;
    }

    __TRY
    {
        if(copy)
            call__Concurrent_queue_base_v4__Copy_item(parent, p, id-page_id, e);
        else
            call__Concurrent_queue_base_v4__Move_item(parent, p, id-page_id, e);
        p->_Mask |= 1 << (id - page_id);
    }
    __FINALLY_CTX(queue_push_finally, queue);
}
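
/* A pop waits until the slot has been produced (tail_pos > id) and until it
 * owns the sub-queue's head ticket.  A clear _Mask bit means the producer's
 * copy/move threw, so the slot is skipped and FALSE is returned; the page is
 * deallocated once its last slot has been consumed. */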
static BOOL threadsafe_queue_pop(threadsafe_queue *queue, size_t id,
        void *e, _Concurrent_queue_base_v4 *parent)
{
    size_t page_id = id & ~(parent->alloc_count-1);
    int spin;
    _Page *p;
    BOOL ret = FALSE;

    spin = 0;
    while(queue->tail_pos <= id)
        spin_wait(&spin);

    spin = 0;
    while(queue->head_pos != id)
        spin_wait(&spin);

    p = queue->head;
    if(p->_Mask & (1 << (id-page_id)))
    {
        /* TODO: Add exception handling */
        call__Concurrent_queue_base_v4__Assign_and_destroy_item(parent, e, p, id-page_id);
        ret = TRUE;
    }

    if(id == page_id+parent->alloc_count-1)
    {
        spin = 0;
        while(InterlockedCompareExchange(&queue->lock, 1, 0))
            spin_wait(&spin);
        queue->head = p->_Next;
        if(!queue->head)
            queue->tail = NULL;
        WriteRelease(&queue->lock, 0);

        /* TODO: Add exception handling */
        call__Concurrent_queue_base_v4__Deallocate_page(parent, p);
    }

    InterlockedIncrementSizeT(&queue->head_pos);
    return ret;
}
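
/* The queue-wide ticket id maps to sub-queue id % QUEUES_NO at position
 * id / QUEUES_NO; e.g. with QUEUES_NO == 8, ticket 11 lands in queue 3 at
 * position 1. */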
/* ?_Internal_push@_Concurrent_queue_base_v4@details@Concurrency@@IAEXPBX@Z */
/* ?_Internal_push@_Concurrent_queue_base_v4@details@Concurrency@@IEAAXPEBX@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_push, 8)
void __thiscall _Concurrent_queue_base_v4__Internal_push(
        _Concurrent_queue_base_v4 *this, void *e)
{
    size_t id;

    TRACE("(%p %p)\n", this, e);

    id = InterlockedIncrementSizeT(&this->data->tail_pos)-1;
    threadsafe_queue_push(this->data->queues + id % QUEUES_NO,
            id / QUEUES_NO, e, this, TRUE);
}

/* ?_Internal_move_push@_Concurrent_queue_base_v4@details@Concurrency@@IAEXPAX@Z */
/* ?_Internal_move_push@_Concurrent_queue_base_v4@details@Concurrency@@IEAAXPEAX@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_move_push, 8)
void __thiscall _Concurrent_queue_base_v4__Internal_move_push(
        _Concurrent_queue_base_v4 *this, void *e)
{
    size_t id;

    TRACE("(%p %p)\n", this, e);

    id = InterlockedIncrementSizeT(&this->data->tail_pos)-1;
    threadsafe_queue_push(this->data->queues + id % QUEUES_NO,
            id / QUEUES_NO, e, this, FALSE);
}
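
/* Claims the global head ticket with a compare-and-swap; if the claimed slot
 * turns out to have been skipped by its producer (threadsafe_queue_pop
 * returns FALSE), the next ticket is tried until an item is popped or the
 * queue is found empty. */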
/* ?_Internal_pop_if_present@_Concurrent_queue_base_v4@details@Concurrency@@IAE_NPAX@Z */
/* ?_Internal_pop_if_present@_Concurrent_queue_base_v4@details@Concurrency@@IEAA_NPEAX@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_pop_if_present, 8)
bool __thiscall _Concurrent_queue_base_v4__Internal_pop_if_present(
        _Concurrent_queue_base_v4 *this, void *e)
{
    size_t id;

    TRACE("(%p %p)\n", this, e);

    do
    {
        do
        {
            id = this->data->head_pos;
            if(id == this->data->tail_pos) return FALSE;
        } while(InterlockedCompareExchangePointer((void**)&this->data->head_pos,
                    (void*)(id+1), (void*)id) != (void*)id);
    } while(!threadsafe_queue_pop(this->data->queues + id % QUEUES_NO,
                id / QUEUES_NO, e, this));
    return TRUE;
}

/* ?_Internal_swap@_Concurrent_queue_base_v4@details@Concurrency@@IAEXAAV123@@Z */
/* ?_Internal_swap@_Concurrent_queue_base_v4@details@Concurrency@@IEAAXAEAV123@@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4__Internal_swap, 8)
void __thiscall _Concurrent_queue_base_v4__Internal_swap(
        _Concurrent_queue_base_v4 *this, _Concurrent_queue_base_v4 *r)
{
    FIXME("(%p %p) stub\n", this, r);
}

DEFINE_THISCALL_WRAPPER(_Concurrent_queue_base_v4_dummy, 4)
void __thiscall _Concurrent_queue_base_v4_dummy(_Concurrent_queue_base_v4 *this)
{
    ERR("unexpected call\n");
}

DEFINE_RTTI_DATA0(_Concurrent_queue_base_v4, 0, ".?AV_Concurrent_queue_base_v4@details@Concurrency@@")

static LONG _Runtime_object_id;

typedef struct
{
    const vtable_ptr *vtable;
    int id;
} _Runtime_object;

extern const vtable_ptr _Runtime_object_vtable;

/* ??0_Runtime_object@details@Concurrency@@QAE@H@Z */
/* ??0_Runtime_object@details@Concurrency@@QEAA@H@Z */
DEFINE_THISCALL_WRAPPER(_Runtime_object_ctor_id, 8)
_Runtime_object* __thiscall _Runtime_object_ctor_id(_Runtime_object *this, int id)
{
    TRACE("(%p %d)\n", this, id);
    this->vtable = &_Runtime_object_vtable;
    this->id = id;
    return this;
}

/* ??0_Runtime_object@details@Concurrency@@QAE@XZ */
/* ??0_Runtime_object@details@Concurrency@@QEAA@XZ */
DEFINE_THISCALL_WRAPPER(_Runtime_object_ctor, 4)
_Runtime_object* __thiscall _Runtime_object_ctor(_Runtime_object *this)
{
    TRACE("(%p)\n", this);
    this->vtable = &_Runtime_object_vtable;
    this->id = InterlockedExchangeAdd(&_Runtime_object_id, 2);
    return this;
}

DEFINE_THISCALL_WRAPPER(_Runtime_object__GetId, 4)
int __thiscall _Runtime_object__GetId(_Runtime_object *this)
{
    TRACE("(%p)\n", this);
    return this->id;
}

DEFINE_RTTI_DATA0(_Runtime_object, 0, ".?AV_Runtime_object@details@Concurrency@@")

typedef struct __Concurrent_vector_base_v4
{
    void* (__cdecl *allocator)(struct __Concurrent_vector_base_v4 *, size_t);
    void *storage[3];
    size_t first_block;
    size_t early_size;
    void **segment;
} _Concurrent_vector_base_v4;

#define STORAGE_SIZE ARRAY_SIZE(this->storage)
#define SEGMENT_SIZE (sizeof(void*) * 8)

typedef struct compact_block
{
    size_t first_block;
    void *blocks[SEGMENT_SIZE];
    int size_check;
} compact_block;

/* Return the integer base-2 logarithm of (x|1). Result is 0 for x == 0. */
static inline unsigned int log2i(unsigned int x)
{
    ULONG index;
    BitScanReverse(&index, x|1);
    return index;
}
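
/* Maps an element index to its segment: segment 0 holds indices 0-1 and
 * every later segment k holds the 2^k indices starting at 2^k, so e.g.
 * index 5 lives in segment 2. */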
/* ?_Segment_index_of@_Concurrent_vector_base_v4@details@Concurrency@@KAII@Z */
/* ?_Segment_index_of@_Concurrent_vector_base_v4@details@Concurrency@@KA_K_K@Z */
size_t __cdecl _vector_base_v4__Segment_index_of(size_t x)
{
    unsigned int half;

    TRACE("(%Iu)\n", x);

    if((sizeof(x) == 8) && (half = x >> 32))
        return log2i(half) + 32;

    return log2i(x);
}

/* ?_Internal_throw_exception@_Concurrent_vector_base_v4@details@Concurrency@@IBEXI@Z */
/* ?_Internal_throw_exception@_Concurrent_vector_base_v4@details@Concurrency@@IEBAX_K@Z */
DEFINE_THISCALL_WRAPPER(_vector_base_v4__Internal_throw_exception, 8)
void __thiscall _vector_base_v4__Internal_throw_exception(void/*_vector_base_v4*/ *this, size_t idx)
{
    TRACE("(%p %Iu)\n", this, idx);

    switch(idx) {
    case 0: _Xout_of_range("Index out of range");
    case 1: _Xout_of_range("Index out of segments table range");
    case 2: throw_range_error("Index is inside segment which failed to be allocated");
    }
}

#ifdef _WIN64
#define InterlockedCompareExchangeSizeT(dest, exchange, cmp) InterlockedCompareExchangeSize((size_t *)dest, (size_t)exchange, (size_t)cmp)
static size_t InterlockedCompareExchangeSize(size_t volatile *dest, size_t exchange, size_t cmp)
{
    size_t v;

    v = InterlockedCompareExchange64((LONGLONG*)dest, exchange, cmp);

    return v;
}
#else
#define InterlockedCompareExchangeSizeT(dest, exchange, cmp) InterlockedCompareExchange((LONG*)dest, (size_t)exchange, (size_t)cmp)
#endif

#define SEGMENT_ALLOC_MARKER ((void*)1)
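
/* Allocates this->segment[seg] exactly once even when racing: the winner of
 * the CAS from NULL to SEGMENT_ALLOC_MARKER performs the allocation while
 * the losers spin until the real pointer appears.  Segments below
 * first_block don't get their own allocation; they alias into the block
 * allocated for segment 0. */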
static void concurrent_vector_alloc_segment(_Concurrent_vector_base_v4 *this,
        size_t seg, size_t element_size)
{
    int spin;

    while(!this->segment[seg] || this->segment[seg] == SEGMENT_ALLOC_MARKER)
    {
        spin = 0;
        while(this->segment[seg] == SEGMENT_ALLOC_MARKER)
            spin_wait(&spin);
        if(!InterlockedCompareExchangeSizeT((this->segment + seg),
                    SEGMENT_ALLOC_MARKER, 0))
        {
            __TRY
            {
                if(seg == 0)
                    this->segment[seg] = this->allocator(this, element_size * (1 << this->first_block));
                else if(seg < this->first_block)
                    this->segment[seg] = (BYTE**)this->segment[0]
                            + element_size * (1 << seg);
                else
                    this->segment[seg] = this->allocator(this, element_size * (1 << seg));
            }
            __EXCEPT_ALL
            {
                this->segment[seg] = NULL;
                _CxxThrowException(NULL, NULL);
            }
            __ENDTRY
            if(!this->segment[seg])
                _vector_base_v4__Internal_throw_exception(this, 2);
        }
    }
}

/* ??1_Concurrent_vector_base_v4@details@Concurrency@@IAE@XZ */
/* ??1_Concurrent_vector_base_v4@details@Concurrency@@IEAA@XZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4_dtor, 4)
void __thiscall _Concurrent_vector_base_v4_dtor(
        _Concurrent_vector_base_v4 *this)
{
    TRACE("(%p)\n", this);

    if(this->segment != this->storage)
        free(this->segment);
}

/* ?_Internal_capacity@_Concurrent_vector_base_v4@details@Concurrency@@IBEIXZ */
/* ?_Internal_capacity@_Concurrent_vector_base_v4@details@Concurrency@@IEBA_KXZ */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_capacity, 4)
size_t __thiscall _Concurrent_vector_base_v4__Internal_capacity(
        const _Concurrent_vector_base_v4 *this)
{
    size_t last_block;
    int i;

    TRACE("(%p)\n", this);

    last_block = (this->segment == this->storage ? STORAGE_SIZE : SEGMENT_SIZE);
    for(i = 0; i < last_block; i++)
    {
        if(!this->segment[i])
            return !i ? 0 : 1 << i;
    }
    return 1 << i;
}
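
/* Grows capacity to hold at least size elements.  first_block is fixed on
 * first use so that the initial allocation covers segments 0..first_block-1
 * in one block; once the inline storage[] table is exhausted the segment
 * table spills into a heap allocation of SEGMENT_SIZE entries. */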
/* ?_Internal_reserve@_Concurrent_vector_base_v4@details@Concurrency@@IAEXIII@Z */
/* ?_Internal_reserve@_Concurrent_vector_base_v4@details@Concurrency@@IEAAX_K00@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_reserve, 16)
void __thiscall _Concurrent_vector_base_v4__Internal_reserve(
        _Concurrent_vector_base_v4 *this, size_t size,
        size_t element_size, size_t max_size)
{
    size_t block_idx, capacity;
    int i;
    void **new_segment;

    TRACE("(%p %Iu %Iu %Iu)\n", this, size, element_size, max_size);

    if(size > max_size) _vector_base_v4__Internal_throw_exception(this, 0);
    capacity = _Concurrent_vector_base_v4__Internal_capacity(this);
    if(size <= capacity) return;
    block_idx = _vector_base_v4__Segment_index_of(size - 1);
    if(!this->first_block)
        InterlockedCompareExchangeSizeT(&this->first_block, block_idx + 1, 0);
    i = _vector_base_v4__Segment_index_of(capacity);
    if(this->storage == this->segment) {
        for(; i <= block_idx && i < STORAGE_SIZE; i++)
            concurrent_vector_alloc_segment(this, i, element_size);
        if(block_idx >= STORAGE_SIZE) {
            new_segment = malloc(SEGMENT_SIZE * sizeof(void*));
            if(new_segment == NULL) _vector_base_v4__Internal_throw_exception(this, 2);
            memset(new_segment, 0, SEGMENT_SIZE * sizeof(*new_segment));
            memcpy(new_segment, this->storage, STORAGE_SIZE * sizeof(*new_segment));
            if(InterlockedCompareExchangePointer((void*)&this->segment, new_segment,
                        this->storage) != this->storage)
                free(new_segment);
        }
    }
    for(; i <= block_idx; i++)
        concurrent_vector_alloc_segment(this, i, element_size);
}

/* ?_Internal_clear@_Concurrent_vector_base_v4@details@Concurrency@@IAEIP6AXPAXI@Z@Z */
/* ?_Internal_clear@_Concurrent_vector_base_v4@details@Concurrency@@IEAA_KP6AXPEAX_K@Z@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_clear, 8)
size_t __thiscall _Concurrent_vector_base_v4__Internal_clear(
        _Concurrent_vector_base_v4 *this, void (__cdecl *clear)(void*, size_t))
{
    size_t seg_no, elems;
    int i;

    TRACE("(%p %p)\n", this, clear);

    seg_no = this->early_size ? _vector_base_v4__Segment_index_of(this->early_size) + 1 : 0;
    for(i = seg_no - 1; i >= 0; i--) {
        elems = this->early_size - (1 << i & ~1);
        clear(this->segment[i], elems);
        this->early_size -= elems;
    }
    while(seg_no < (this->segment == this->storage ? STORAGE_SIZE : SEGMENT_SIZE)) {
        if(!this->segment[seg_no]) break;
        seg_no++;
    }
    return seg_no;
}
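
/* Repacks the vector so that all elements again live in one contiguous first
 * block.  The caller passes scratch space in v (a compact_block); the old
 * block pointers are returned through it, presumably so the C++ caller can
 * free them after the elements have been copied over and destroyed. */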
/* ?_Internal_compact@_Concurrent_vector_base_v4@details@Concurrency@@IAEPAXIPAXP6AX0I@ZP6AX0PBXI@Z@Z */
/* ?_Internal_compact@_Concurrent_vector_base_v4@details@Concurrency@@IEAAPEAX_KPEAXP6AX10@ZP6AX1PEBX0@Z@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_compact, 20)
void * __thiscall _Concurrent_vector_base_v4__Internal_compact(
        _Concurrent_vector_base_v4 *this, size_t element_size, void *v,
        void (__cdecl *clear)(void*, size_t),
        void (__cdecl *copy)(void*, const void*, size_t))
{
    compact_block *b;
    size_t size, alloc_size, seg_no, alloc_seg, copy_element, clear_element;
    int i;

    TRACE("(%p %Iu %p %p %p)\n", this, element_size, v, clear, copy);

    size = this->early_size;
    alloc_size = _Concurrent_vector_base_v4__Internal_capacity(this);
    if(alloc_size == 0) return NULL;
    alloc_seg = _vector_base_v4__Segment_index_of(alloc_size - 1);
    if(!size) {
        this->first_block = 0;
        b = v;
        b->first_block = alloc_seg + 1;
        memset(b->blocks, 0, sizeof(b->blocks));
        memcpy(b->blocks, this->segment,
                (alloc_seg + 1) * sizeof(this->segment[0]));
        memset(this->segment, 0, sizeof(this->segment[0]) * (alloc_seg + 1));
        return v;
    }
    seg_no = _vector_base_v4__Segment_index_of(size - 1);
    if(this->first_block == (seg_no + 1) && seg_no == alloc_seg) return NULL;
    b = v;
    b->first_block = this->first_block;
    memset(b->blocks, 0, sizeof(b->blocks));
    memcpy(b->blocks, this->segment,
            (alloc_seg + 1) * sizeof(this->segment[0]));
    if(this->first_block == (seg_no + 1) && seg_no != alloc_seg) {
        memset(b->blocks, 0, sizeof(b->blocks[0]) * (seg_no + 1));
        memset(&this->segment[seg_no + 1], 0, sizeof(this->segment[0]) * (alloc_seg - seg_no));
        return v;
    }
    memset(this->segment, 0,
            (alloc_seg + 1) * sizeof(this->segment[0]));
    this->first_block = 0;
    _Concurrent_vector_base_v4__Internal_reserve(this, size, element_size,
            ~(size_t)0 / element_size);
    for(i = 0; i < seg_no; i++)
        copy(this->segment[i], b->blocks[i], i ? 1 << i : 2);
    copy_element = size - ((1 << seg_no) & ~1);
    if(copy_element > 0)
        copy(this->segment[seg_no], b->blocks[seg_no], copy_element);
    for(i = 0; i < seg_no; i++)
        clear(b->blocks[i], i ? 1 << i : 2);
    clear_element = size - ((1 << seg_no) & ~1);
    if(clear_element > 0)
        clear(b->blocks[seg_no], clear_element);
    return v;
}

/* ?_Internal_copy@_Concurrent_vector_base_v4@details@Concurrency@@IAEXABV123@IP6AXPAXPBXI@Z@Z */
/* ?_Internal_copy@_Concurrent_vector_base_v4@details@Concurrency@@IEAAXAEBV123@_KP6AXPEAXPEBX1@Z@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_copy, 16)
void __thiscall _Concurrent_vector_base_v4__Internal_copy(
        _Concurrent_vector_base_v4 *this, const _Concurrent_vector_base_v4 *v,
        size_t element_size, void (__cdecl *copy)(void*, const void*, size_t))
{
    size_t seg_no, v_size;
    int i;

    TRACE("(%p %p %Iu %p)\n", this, v, element_size, copy);

    v_size = v->early_size;
    if(!v_size) {
        this->early_size = 0;
        return;
    }
    _Concurrent_vector_base_v4__Internal_reserve(this, v_size,
            element_size, ~(size_t)0 / element_size);
    seg_no = _vector_base_v4__Segment_index_of(v_size - 1);
    for(i = 0; i < seg_no; i++)
        copy(this->segment[i], v->segment[i], i ? 1 << i : 2);
    copy(this->segment[i], v->segment[i], v_size - (1 << i & ~1));
    this->early_size = v_size;
}
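
/* Element-wise assignment: assign() over the prefix both vectors share,
 * then either clear() the surplus elements (shrinking) or copy() the extra
 * source elements into freshly reserved segments (growing). */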
/* ?_Internal_assign@_Concurrent_vector_base_v4@details@Concurrency@@IAEXABV123@IP6AXPAXI@ZP6AX1PBXI@Z4@Z */
/* ?_Internal_assign@_Concurrent_vector_base_v4@details@Concurrency@@IEAAXAEBV123@_KP6AXPEAX1@ZP6AX2PEBX1@Z5@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_assign, 24)
void __thiscall _Concurrent_vector_base_v4__Internal_assign(
        _Concurrent_vector_base_v4 *this, const _Concurrent_vector_base_v4 *v,
        size_t element_size, void (__cdecl *clear)(void*, size_t),
        void (__cdecl *assign)(void*, const void*, size_t),
        void (__cdecl *copy)(void*, const void*, size_t))
{
    size_t v_size, seg_no, v_seg_no, remain_element;
    int i;

    TRACE("(%p %p %Iu %p %p %p)\n", this, v, element_size, clear, assign, copy);

    v_size = v->early_size;
    if(!v_size) {
        _Concurrent_vector_base_v4__Internal_clear(this, clear);
        return;
    }
    if(!this->early_size) {
        _Concurrent_vector_base_v4__Internal_copy(this, v, element_size, copy);
        return;
    }
    seg_no = _vector_base_v4__Segment_index_of(this->early_size - 1);
    v_seg_no = _vector_base_v4__Segment_index_of(v_size - 1);

    for(i = 0; i < min(seg_no, v_seg_no); i++)
        assign(this->segment[i], v->segment[i], i ? 1 << i : 2);
    remain_element = min(this->early_size, v_size) - (1 << i & ~1);
    if(remain_element != 0)
        assign(this->segment[i], v->segment[i], remain_element);

    if(this->early_size > v_size)
    {
        if((i ? 1 << i : 2) - remain_element > 0)
            clear((BYTE**)this->segment[i] + element_size * remain_element,
                    (i ? 1 << i : 2) - remain_element);
        if(i < seg_no)
        {
            for(i++; i < seg_no; i++)
                clear(this->segment[i], 1 << i);
            clear(this->segment[i], this->early_size - (1 << i));
        }
    }
    else if(this->early_size < v_size)
    {
        if((i ? 1 << i : 2) - remain_element > 0)
            copy((BYTE**)this->segment[i] + element_size * remain_element,
                    (BYTE**)v->segment[i] + element_size * remain_element,
                    (i ? 1 << i : 2) - remain_element);
        if(i < v_seg_no)
        {
            _Concurrent_vector_base_v4__Internal_reserve(this, v_size,
                    element_size, ~(size_t)0 / element_size);
            for(i++; i < v_seg_no; i++)
                copy(this->segment[i], v->segment[i], 1 << i);
            copy(this->segment[i], v->segment[i], v->early_size - (1 << i));
        }
    }
    this->early_size = v_size;
}
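
/* Atomically claims the index range [size, size+count) by advancing
 * early_size with a compare-and-swap (re-reserving on each retry), then
 * fills the claimed slots with copies of *v. */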
/* ?_Internal_grow_by@_Concurrent_vector_base_v4@details@Concurrency@@IAEIIIP6AXPAXPBXI@Z1@Z */
/* ?_Internal_grow_by@_Concurrent_vector_base_v4@details@Concurrency@@IEAA_K_K0P6AXPEAXPEBX0@Z2@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_grow_by, 20)
size_t __thiscall _Concurrent_vector_base_v4__Internal_grow_by(
        _Concurrent_vector_base_v4 *this, size_t count, size_t element_size,
        void (__cdecl *copy)(void*, const void*, size_t), const void *v)
{
    size_t size, seg_no, last_seg_no, remain_size;

    TRACE("(%p %Iu %Iu %p %p)\n", this, count, element_size, copy, v);

    if(count == 0) return this->early_size;
    do {
        size = this->early_size;
        _Concurrent_vector_base_v4__Internal_reserve(this, size + count, element_size,
                ~(size_t)0 / element_size);
    } while(InterlockedCompareExchangeSizeT(&this->early_size, size + count, size) != size);

    seg_no = size ? _vector_base_v4__Segment_index_of(size - 1) : 0;
    last_seg_no = _vector_base_v4__Segment_index_of(size + count - 1);
    remain_size = min(size + count, 1 << (seg_no + 1)) - size;
    if(remain_size > 0)
        copy(((BYTE**)this->segment[seg_no] + element_size * (size - ((1 << seg_no) & ~1))), v,
                remain_size);
    if(seg_no != last_seg_no)
    {
        for(seg_no++; seg_no < last_seg_no; seg_no++)
            copy(this->segment[seg_no], v, 1 << seg_no);
        copy(this->segment[last_seg_no], v, size + count - (1 << last_seg_no));
    }
    return size;
}
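
/* Like _Internal_grow_by, but only extends the vector up to count elements
 * (giving up if another thread got there first) and returns the size the
 * vector had before this call grew it. */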
/* ?_Internal_grow_to_at_least_with_result@_Concurrent_vector_base_v4@details@Concurrency@@IAEIIIP6AXPAXPBXI@Z1@Z */
/* ?_Internal_grow_to_at_least_with_result@_Concurrent_vector_base_v4@details@Concurrency@@IEAA_K_K0P6AXPEAXPEBX0@Z2@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_grow_to_at_least_with_result, 20)
size_t __thiscall _Concurrent_vector_base_v4__Internal_grow_to_at_least_with_result(
        _Concurrent_vector_base_v4 *this, size_t count, size_t element_size,
        void (__cdecl *copy)(void*, const void*, size_t), const void *v)
{
    size_t size, seg_no, last_seg_no, remain_size;

    TRACE("(%p %Iu %Iu %p %p)\n", this, count, element_size, copy, v);

    _Concurrent_vector_base_v4__Internal_reserve(this, count, element_size,
            ~(size_t)0 / element_size);
    do {
        size = this->early_size;
        if(size >= count) return size;
    } while(InterlockedCompareExchangeSizeT(&this->early_size, count, size) != size);

    seg_no = size ? _vector_base_v4__Segment_index_of(size - 1) : 0;
    last_seg_no = _vector_base_v4__Segment_index_of(count - 1);
    remain_size = min(count, 1 << (seg_no + 1)) - size;
    if(remain_size > 0)
        copy(((BYTE**)this->segment[seg_no] + element_size * (size - ((1 << seg_no) & ~1))), v,
                remain_size);
    if(seg_no != last_seg_no)
    {
        for(seg_no++; seg_no < last_seg_no; seg_no++)
            copy(this->segment[seg_no], v, 1 << seg_no);
        copy(this->segment[last_seg_no], v, count - (1 << last_seg_no));
    }
    return size;
}
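
/* Claims a single slot by bumping early_size with a compare-and-swap and
 * returns a pointer to its raw storage; the index goes out through *idx and
 * the caller constructs the element in place. */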
/* ?_Internal_push_back@_Concurrent_vector_base_v4@details@Concurrency@@IAEPAXIAAI@Z */
/* ?_Internal_push_back@_Concurrent_vector_base_v4@details@Concurrency@@IEAAPEAX_KAEA_K@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_push_back, 12)
void * __thiscall _Concurrent_vector_base_v4__Internal_push_back(
        _Concurrent_vector_base_v4 *this, size_t element_size, size_t *idx)
{
    size_t index, seg, segment_base;
    void *data;

    TRACE("(%p %Iu %p)\n", this, element_size, idx);

    do {
        index = this->early_size;
        _Concurrent_vector_base_v4__Internal_reserve(this, index + 1,
                element_size, ~(size_t)0 / element_size);
    } while(InterlockedCompareExchangeSizeT(&this->early_size, index + 1, index) != index);
    seg = _vector_base_v4__Segment_index_of(index);
    segment_base = (seg == 0) ? 0 : (1 << seg);
    data = (BYTE*)this->segment[seg] + element_size * (index - segment_base);
    *idx = index;

    return data;
}
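
/* resize > size grows the vector with copies of *v, resize == 0 clears it
 * completely, and 0 < resize < size destroys the trailing elements segment
 * by segment, starting with the partially used last one. */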
/* ?_Internal_resize@_Concurrent_vector_base_v4@details@Concurrency@@IAEXIIIP6AXPAXI@ZP6AX0PBXI@Z2@Z */
/* ?_Internal_resize@_Concurrent_vector_base_v4@details@Concurrency@@IEAAX_K00P6AXPEAX0@ZP6AX1PEBX0@Z3@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_resize, 28)
void __thiscall _Concurrent_vector_base_v4__Internal_resize(
        _Concurrent_vector_base_v4 *this, size_t resize, size_t element_size,
        size_t max_size, void (__cdecl *clear)(void*, size_t),
        void (__cdecl *copy)(void*, const void*, size_t), const void *v)
{
    size_t size, seg_no, end_seg_no, clear_element;

    TRACE("(%p %Iu %Iu %Iu %p %p %p)\n", this, resize, element_size, max_size, clear, copy, v);

    if(resize > max_size) _vector_base_v4__Internal_throw_exception(this, 0);
    size = this->early_size;
    if(resize > size)
        _Concurrent_vector_base_v4__Internal_grow_to_at_least_with_result(this,
                resize, element_size, copy, v);
    else if(resize == 0)
        _Concurrent_vector_base_v4__Internal_clear(this, clear);
    else if(resize < size)
    {
        seg_no = _vector_base_v4__Segment_index_of(size - 1);
        end_seg_no = _vector_base_v4__Segment_index_of(resize - 1);
        clear_element = size - (seg_no ? 1 << seg_no : 2);
        if(clear_element > 0)
            clear(this->segment[seg_no], clear_element);
        if(seg_no) seg_no--;
        for(; seg_no > end_seg_no; seg_no--)
            clear(this->segment[seg_no], 1 << seg_no);
        clear_element = (1 << (end_seg_no + 1)) - resize;
        if(clear_element > 0)
            clear((BYTE**)this->segment[end_seg_no] + element_size * (resize - ((1 << end_seg_no) & ~1)),
                    clear_element);
        this->early_size = resize;
    }
}

/* ?_Internal_swap@_Concurrent_vector_base_v4@details@Concurrency@@IAEXAAV123@@Z */
/* ?_Internal_swap@_Concurrent_vector_base_v4@details@Concurrency@@IEAAXAEAV123@@Z */
DEFINE_THISCALL_WRAPPER(_Concurrent_vector_base_v4__Internal_swap, 8)
void __thiscall _Concurrent_vector_base_v4__Internal_swap(
        _Concurrent_vector_base_v4 *this, _Concurrent_vector_base_v4 *v)
{
    _Concurrent_vector_base_v4 temp;

    TRACE("(%p %p)\n", this, v);

    temp = *this;
    *this = *v;
    *v = temp;
    if(v->segment == this->storage)
        v->segment = v->storage;
    if(this->segment == v->storage)
        this->segment = this->storage;
}

/* ?is_current_task_group_canceling@Concurrency@@YA_NXZ */
bool __cdecl is_current_task_group_canceling(void)
{
    return Context_IsCurrentTaskCollectionCanceling();
}

/* ?_GetCombinableSize@details@Concurrency@@YAIXZ */
/* ?_GetCombinableSize@details@Concurrency@@YA_KXZ */
size_t __cdecl _GetCombinableSize(void)
{
    FIXME("() stub\n");
    return 11;
}

#if _MSVCP_VER >= 140
typedef struct {
    void *unk0;
    BYTE unk1;
} task_continuation_context;

/* ??0task_continuation_context@Concurrency@@AAE@XZ */
/* ??0task_continuation_context@Concurrency@@AEAA@XZ */
DEFINE_THISCALL_WRAPPER(task_continuation_context_ctor, 4)
task_continuation_context* __thiscall task_continuation_context_ctor(task_continuation_context *this)
{
    TRACE("(%p)\n", this);
    memset(this, 0, sizeof(*this));
    return this;
}

typedef struct {
    const vtable_ptr *vtable;
    void (__cdecl *func)(void);
    int unk[4];
    void *unk2[3];
    void *this;
} function_void_cdecl_void;

/* ?_Assign@_ContextCallback@details@Concurrency@@AAEXPAX@Z */
/* ?_Assign@_ContextCallback@details@Concurrency@@AEAAXPEAX@Z */
DEFINE_THISCALL_WRAPPER(_ContextCallback__Assign, 8)
void __thiscall _ContextCallback__Assign(void *this, void *v)
{
    TRACE("(%p %p)\n", this, v);
}

#define call_function_do_call(this) CALL_VTBL_FUNC(this, 8, void, (function_void_cdecl_void*), (this))
#define call_function_do_clean(this,b) CALL_VTBL_FUNC(this, 16, void, (function_void_cdecl_void*,bool), (this, b))
/* ?_CallInContext@_ContextCallback@details@Concurrency@@QBEXV?$function@$$A6AXXZ@std@@_N@Z */
/* ?_CallInContext@_ContextCallback@details@Concurrency@@QEBAXV?$function@$$A6AXXZ@std@@_N@Z */
DEFINE_THISCALL_WRAPPER(_ContextCallback__CallInContext, 48)
void __thiscall _ContextCallback__CallInContext(const void *this, function_void_cdecl_void func, bool b)
{
    TRACE("(%p %p %x)\n", this, func.func, b);
    call_function_do_call(func.this);
    call_function_do_clean(func.this, func.this!=&func);
}

/* ?_Capture@_ContextCallback@details@Concurrency@@AAEXXZ */
/* ?_Capture@_ContextCallback@details@Concurrency@@AEAAXXZ */
DEFINE_THISCALL_WRAPPER(_ContextCallback__Capture, 4)
void __thiscall _ContextCallback__Capture(void *this)
{
    TRACE("(%p)\n", this);
}

/* ?_Reset@_ContextCallback@details@Concurrency@@AAEXXZ */
/* ?_Reset@_ContextCallback@details@Concurrency@@AEAAXXZ */
DEFINE_THISCALL_WRAPPER(_ContextCallback__Reset, 4)
void __thiscall _ContextCallback__Reset(void *this)
{
    TRACE("(%p)\n", this);
}

/* ?_IsCurrentOriginSTA@_ContextCallback@details@Concurrency@@CA_NXZ */
bool __cdecl _ContextCallback__IsCurrentOriginSTA(void *this)
{
    TRACE("(%p)\n", this);
    return FALSE;
}

typedef struct {
    /*_Task_impl_base*/void *task;
    bool scheduled;
    bool started;
} _TaskEventLogger;

/* ?_LogCancelTask@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogCancelTask@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogCancelTask, 4)
void __thiscall _TaskEventLogger__LogCancelTask(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}

/* ?_LogScheduleTask@_TaskEventLogger@details@Concurrency@@QAEX_N@Z */
/* ?_LogScheduleTask@_TaskEventLogger@details@Concurrency@@QEAAX_N@Z */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogScheduleTask, 8)
void __thiscall _TaskEventLogger__LogScheduleTask(_TaskEventLogger *this, bool continuation)
{
    TRACE("(%p %x)\n", this, continuation);
}

/* ?_LogTaskCompleted@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogTaskCompleted@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogTaskCompleted, 4)
void __thiscall _TaskEventLogger__LogTaskCompleted(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}

/* ?_LogTaskExecutionCompleted@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogTaskExecutionCompleted@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogTaskExecutionCompleted, 4)
void __thiscall _TaskEventLogger__LogTaskExecutionCompleted(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}

/* ?_LogWorkItemCompleted@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogWorkItemCompleted@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogWorkItemCompleted, 4)
void __thiscall _TaskEventLogger__LogWorkItemCompleted(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}

/* ?_LogWorkItemStarted@_TaskEventLogger@details@Concurrency@@QAEXXZ */
/* ?_LogWorkItemStarted@_TaskEventLogger@details@Concurrency@@QEAAXXZ */
DEFINE_THISCALL_WRAPPER(_TaskEventLogger__LogWorkItemStarted, 4)
void __thiscall _TaskEventLogger__LogWorkItemStarted(_TaskEventLogger *this)
{
    TRACE("(%p)\n", this);
}

typedef struct {
    PTP_WORK work;
    void (__cdecl *callback)(void*);
    void *arg;
} _Threadpool_chore;
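
/* The chore functions below map the ConcRT threadpool interface directly
 * onto the Win32 thread pool: _Schedule_chore creates a work object,
 * _Reschedule_chore resubmits it, and _Release_chore closes it. */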
/* ?_Reschedule_chore@details@Concurrency@@YAHPBU_Threadpool_chore@12@@Z */
/* ?_Reschedule_chore@details@Concurrency@@YAHPEBU_Threadpool_chore@12@@Z */
int __cdecl _Reschedule_chore(const _Threadpool_chore *chore)
{
    TRACE("(%p)\n", chore);

    SubmitThreadpoolWork(chore->work);
    return 0;
}

static void WINAPI threadpool_callback(PTP_CALLBACK_INSTANCE instance, void *context, PTP_WORK work)
{
    _Threadpool_chore *chore = context;
    TRACE("calling chore callback: %p\n", chore);
    if (chore->callback)
        chore->callback(chore->arg);
}

/* ?_Schedule_chore@details@Concurrency@@YAHPAU_Threadpool_chore@12@@Z */
/* ?_Schedule_chore@details@Concurrency@@YAHPEAU_Threadpool_chore@12@@Z */
int __cdecl _Schedule_chore(_Threadpool_chore *chore)
{
    TRACE("(%p)\n", chore);

    chore->work = CreateThreadpoolWork(threadpool_callback, chore, NULL);
    /* FIXME: what should be returned in case of error */
    if(!chore->work)
        return -1;

    return _Reschedule_chore(chore);
}

/* ?_Release_chore@details@Concurrency@@YAXPAU_Threadpool_chore@12@@Z */
/* ?_Release_chore@details@Concurrency@@YAXPEAU_Threadpool_chore@12@@Z */
void __cdecl _Release_chore(_Threadpool_chore *chore)
{
    TRACE("(%p)\n", chore);

    if(!chore->work) return;
    CloseThreadpoolWork(chore->work);
    chore->work = NULL;
}
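
/* Illustrative usage sketch only (not part of the original file); a caller
 * such as msvcp's task machinery might do something like:
 *
 *     _Threadpool_chore chore = { NULL, my_callback, my_arg };
 *     if(!_Schedule_chore(&chore)) { ... wait for the callback ... }
 *     _Release_chore(&chore);
 *
 * where my_callback and my_arg are hypothetical caller-provided values. */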

/* ?_IsNonBlockingThread@_Task_impl_base@details@Concurrency@@SA_NXZ */
bool __cdecl _Task_impl_base__IsNonBlockingThread(void)
{
    FIXME("() stub\n");
    return FALSE;
}

/* ?ReportUnhandledError@_ExceptionHolder@details@Concurrency@@AAAXXZ */
/* ?ReportUnhandledError@_ExceptionHolder@details@Concurrency@@AAEXXZ */
/* ?ReportUnhandledError@_ExceptionHolder@details@Concurrency@@AEAAXXZ */
DEFINE_THISCALL_WRAPPER(_ExceptionHolder__ReportUnhandledError, 4)
void __thiscall _ExceptionHolder__ReportUnhandledError(void *this)
{
    FIXME("(%p) stub\n", this);
}
#endif

__ASM_BLOCK_BEGIN(concurrency_details_vtables)
    __ASM_VTABLE(_Concurrent_queue_base_v4,
#if _MSVCP_VER >= 110
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy)
#endif
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy)
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy)
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_vector_dtor)
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy)
            VTABLE_ADD_FUNC(_Concurrent_queue_base_v4_dummy));
    __ASM_VTABLE(_Runtime_object,
            VTABLE_ADD_FUNC(_Runtime_object__GetId));
__ASM_BLOCK_END

void init_concurrency_details(void *base)
{
#ifdef __x86_64__
    init__Concurrent_queue_base_v4_rtti(base);
    init__Runtime_object_rtti(base);
#endif
}
#endif

#if _MSVCP_VER >= 110
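/* Each entry is its index with the bit order reversed, e.g. 0x01 (00000001b)
 * maps to 0x80 (10000000b). */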
/* ?_Byte_reverse_table@details@Concurrency@@3QBEB */
const BYTE byte_reverse_table[256] =
{
    0x00, 0x80, 0x40, 0xc0, 0x20, 0xa0, 0x60, 0xe0, 0x10, 0x90, 0x50, 0xd0, 0x30, 0xb0, 0x70, 0xf0,
    0x08, 0x88, 0x48, 0xc8, 0x28, 0xa8, 0x68, 0xe8, 0x18, 0x98, 0x58, 0xd8, 0x38, 0xb8, 0x78, 0xf8,
    0x04, 0x84, 0x44, 0xc4, 0x24, 0xa4, 0x64, 0xe4, 0x14, 0x94, 0x54, 0xd4, 0x34, 0xb4, 0x74, 0xf4,
    0x0c, 0x8c, 0x4c, 0xcc, 0x2c, 0xac, 0x6c, 0xec, 0x1c, 0x9c, 0x5c, 0xdc, 0x3c, 0xbc, 0x7c, 0xfc,
    0x02, 0x82, 0x42, 0xc2, 0x22, 0xa2, 0x62, 0xe2, 0x12, 0x92, 0x52, 0xd2, 0x32, 0xb2, 0x72, 0xf2,
    0x0a, 0x8a, 0x4a, 0xca, 0x2a, 0xaa, 0x6a, 0xea, 0x1a, 0x9a, 0x5a, 0xda, 0x3a, 0xba, 0x7a, 0xfa,
    0x06, 0x86, 0x46, 0xc6, 0x26, 0xa6, 0x66, 0xe6, 0x16, 0x96, 0x56, 0xd6, 0x36, 0xb6, 0x76, 0xf6,
    0x0e, 0x8e, 0x4e, 0xce, 0x2e, 0xae, 0x6e, 0xee, 0x1e, 0x9e, 0x5e, 0xde, 0x3e, 0xbe, 0x7e, 0xfe,
    0x01, 0x81, 0x41, 0xc1, 0x21, 0xa1, 0x61, 0xe1, 0x11, 0x91, 0x51, 0xd1, 0x31, 0xb1, 0x71, 0xf1,
    0x09, 0x89, 0x49, 0xc9, 0x29, 0xa9, 0x69, 0xe9, 0x19, 0x99, 0x59, 0xd9, 0x39, 0xb9, 0x79, 0xf9,
    0x05, 0x85, 0x45, 0xc5, 0x25, 0xa5, 0x65, 0xe5, 0x15, 0x95, 0x55, 0xd5, 0x35, 0xb5, 0x75, 0xf5,
    0x0d, 0x8d, 0x4d, 0xcd, 0x2d, 0xad, 0x6d, 0xed, 0x1d, 0x9d, 0x5d, 0xdd, 0x3d, 0xbd, 0x7d, 0xfd,
    0x03, 0x83, 0x43, 0xc3, 0x23, 0xa3, 0x63, 0xe3, 0x13, 0x93, 0x53, 0xd3, 0x33, 0xb3, 0x73, 0xf3,
    0x0b, 0x8b, 0x4b, 0xcb, 0x2b, 0xab, 0x6b, 0xeb, 0x1b, 0x9b, 0x5b, 0xdb, 0x3b, 0xbb, 0x7b, 0xfb,
    0x07, 0x87, 0x47, 0xc7, 0x27, 0xa7, 0x67, 0xe7, 0x17, 0x97, 0x57, 0xd7, 0x37, 0xb7, 0x77, 0xf7,
    0x0f, 0x8f, 0x4f, 0xcf, 0x2f, 0xaf, 0x6f, 0xef, 0x1f, 0x9f, 0x5f, 0xdf, 0x3f, 0xbf, 0x7f, 0xff,
};
#endif