/*
 * Win32 heap functions
 *
 * Copyright 1996 Alexandre Julliard
 * Copyright 1998 Ulrich Weigand
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2.1 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the Free Software
 * Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301, USA
 */
23 #include "wine/port.h"
30 #ifdef HAVE_VALGRIND_MEMCHECK_H
31 #include <valgrind/memcheck.h>
33 #define RUNNING_ON_VALGRIND 0
36 #define NONAMELESSUNION
37 #define NONAMELESSSTRUCT
39 #define WIN32_NO_STATUS
43 #include "wine/list.h"
44 #include "wine/debug.h"
45 #include "wine/server.h"
WINE_DEFAULT_DEBUG_CHANNEL(heap);
/* Note: the heap data structures are loosely based on what Pietrek describes in his
 * book 'Windows 95 System Programming Secrets', with some adaptations for
 * better compatibility with NT.
 */
typedef struct tagARENA_INUSE
{
    DWORD  size;                    /* Block size; must be the first field */
    DWORD  magic : 24;              /* Magic number */
    DWORD  unused_bytes : 8;        /* Number of bytes in the block not used by user data (max value is HEAP_MIN_DATA_SIZE+HEAP_MIN_SHRINK_SIZE) */
} ARENA_INUSE;
typedef struct tagARENA_FREE
{
    DWORD                 size;     /* Block size; must be the first field */
    DWORD                 magic;    /* Magic number */
    struct list           entry;    /* Entry in free list */
} ARENA_FREE;
typedef struct
{
    struct list           entry;      /* entry in heap large blocks list */
    SIZE_T                data_size;  /* size of user data */
    SIZE_T                block_size; /* total size of virtual memory block */
    DWORD                 pad[2];     /* padding to ensure 16-byte alignment of data */
    DWORD                 size;       /* fields for compatibility with normal arenas */
    DWORD                 magic;      /* these must remain at the end of the structure */
} ARENA_LARGE;
#define ARENA_FLAG_FREE        0x00000001  /* flags OR'ed with arena size */
#define ARENA_FLAG_PREV_FREE   0x00000002
#define ARENA_SIZE_MASK        (~3)
#define ARENA_LARGE_SIZE       0xfedcba90  /* magic value for 'size' field in large blocks */

/* Value for arena 'magic' field */
#define ARENA_INUSE_MAGIC      0x455355
#define ARENA_PENDING_MAGIC    0xbedead
#define ARENA_FREE_MAGIC       0x45455246
#define ARENA_LARGE_MAGIC      0x6752614c

#define ARENA_INUSE_FILLER     0x55
#define ARENA_TAIL_FILLER      0xab
#define ARENA_FREE_FILLER      0xfeeefeee
/* everything is aligned on 8 byte boundaries (16 for Win64) */
#define ALIGNMENT              (2*sizeof(void*))
#define LARGE_ALIGNMENT        16  /* large blocks have stricter alignment */
#define ARENA_OFFSET           (ALIGNMENT - sizeof(ARENA_INUSE))

C_ASSERT( sizeof(ARENA_LARGE) % LARGE_ALIGNMENT == 0 );

#define ROUND_SIZE(size)       ((((size) + ALIGNMENT - 1) & ~(ALIGNMENT-1)) + ARENA_OFFSET)
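
/* Worked example (illustrative only): on a 64-bit build ALIGNMENT is 16,
 * sizeof(ARENA_INUSE) is 8, and therefore ARENA_OFFSET is 8.  For a
 * 13-byte request:
 *
 *   ROUND_SIZE(13) = ((13 + 15) & ~15) + 8 = 16 + 8 = 24
 *
 * so the 8-byte in-use header starts at an address equal to 8 mod 16 and
 * the user pointer (header + 1) lands on a 16-byte boundary.
 */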
#define QUIET                  1           /* Suppress messages  */
#define NOISY                  0           /* Report all errors  */

/* minimum data size (without arenas) of an allocated block */
/* make sure that it's larger than a free list entry */
#define HEAP_MIN_DATA_SIZE    ROUND_SIZE(2 * sizeof(struct list))
/* minimum size that must remain to shrink an allocated block */
#define HEAP_MIN_SHRINK_SIZE  (HEAP_MIN_DATA_SIZE+sizeof(ARENA_FREE))
/* minimum size to start allocating large blocks */
#define HEAP_MIN_LARGE_BLOCK_SIZE  0x7f000
/* extra size to add at the end of block for tail checking */
#define HEAP_TAIL_EXTRA_SIZE(flags) \
    ((flags & HEAP_TAIL_CHECKING_ENABLED) || RUNNING_ON_VALGRIND ? ALIGNMENT : 0)
/* Max size of the blocks on the free lists */
static const SIZE_T HEAP_freeListSizes[] =
{
    0x10, 0x20, 0x30, 0x40, 0x60, 0x80, 0x100, 0x200, 0x400, 0x1000, ~0UL
};
#define HEAP_NB_FREE_LISTS  (sizeof(HEAP_freeListSizes)/sizeof(HEAP_freeListSizes[0]))

typedef union
{
    ARENA_FREE  arena;
    void       *alignment[4];
} FREE_LIST_ENTRY;
typedef struct tagSUBHEAP
{
    void               *base;       /* Base address of the sub-heap memory block */
    SIZE_T              size;       /* Size of the whole sub-heap */
    SIZE_T              min_commit; /* Minimum committed size */
    SIZE_T              commitSize; /* Committed size of the sub-heap */
    struct list         entry;      /* Entry in sub-heap list */
    struct tagHEAP     *heap;       /* Main heap structure */
    DWORD               headerSize; /* Size of the heap header */
    DWORD               magic;      /* Magic number */
} SUBHEAP;
#define SUBHEAP_MAGIC    ((DWORD)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))
typedef struct tagHEAP
{
    DWORD_PTR        unknown1[2];
    DWORD            flags;         /* Heap flags */
    DWORD            force_flags;   /* Forced heap flags for debugging */
    SUBHEAP          subheap;       /* First sub-heap */
    struct list      entry;         /* Entry in process heap list */
    struct list      subheap_list;  /* Sub-heap list */
    struct list      large_list;    /* Large blocks list */
    SIZE_T           grow_size;     /* Size of next subheap for growing heap */
    DWORD            magic;         /* Magic number */
    DWORD            pending_pos;   /* Position in pending free requests ring */
    ARENA_INUSE    **pending_free;  /* Ring buffer for pending free requests */
    RTL_CRITICAL_SECTION critSection; /* Critical section for serialization */
    FREE_LIST_ENTRY *freeList;      /* Free lists */
} HEAP;
#define HEAP_MAGIC       ((DWORD)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))

#define HEAP_DEF_SIZE        0x110000   /* Default heap size = 1MB + 64KB */
#define COMMIT_MASK          0xffff  /* bitmask for commit/decommit granularity */
#define MAX_FREE_PENDING     1024    /* max number of free requests to delay */
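
/* Sketch of the delayed-free ring used by HEAP_MakeInUseBlockFree() below;
 * release_block() is a hypothetical stand-in for the coalescing path of
 * that function:
 *
 *   prev = heap->pending_free[heap->pending_pos];   -- block freed 1024 frees ago
 *   heap->pending_free[heap->pending_pos] = block;  -- defer the current block
 *   heap->pending_pos = (heap->pending_pos + 1) % MAX_FREE_PENDING;
 *   if (prev) release_block( prev );
 *
 * Keeping freed blocks around with ARENA_PENDING_MAGIC makes use-after-free
 * errors much more likely to be caught by validation.
 */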
/* some undocumented flags (names are made up) */
#define HEAP_PAGE_ALLOCS      0x01000000
#define HEAP_VALIDATE         0x10000000
#define HEAP_VALIDATE_ALL     0x20000000
#define HEAP_VALIDATE_PARAMS  0x40000000
static HEAP *processHeap;  /* main process heap */
static BOOL HEAP_IsRealArena( HEAP *heapPtr, DWORD flags, LPCVOID block, BOOL quiet );
/* mark a block of memory as free for debugging purposes */
static inline void mark_block_free( void *ptr, SIZE_T size, DWORD flags )
{
    if (flags & HEAP_FREE_CHECKING_ENABLED)
    {
        SIZE_T i;
        for (i = 0; i < size / sizeof(DWORD); i++) ((DWORD *)ptr)[i] = ARENA_FREE_FILLER;
    }
#if defined(VALGRIND_MAKE_MEM_NOACCESS)
    VALGRIND_DISCARD( VALGRIND_MAKE_MEM_NOACCESS( ptr, size ));
#elif defined( VALGRIND_MAKE_NOACCESS)
    VALGRIND_DISCARD( VALGRIND_MAKE_NOACCESS( ptr, size ));
#endif
}
/* mark a block of memory as initialized for debugging purposes */
static inline void mark_block_initialized( void *ptr, SIZE_T size )
{
#if defined(VALGRIND_MAKE_MEM_DEFINED)
    VALGRIND_DISCARD( VALGRIND_MAKE_MEM_DEFINED( ptr, size ));
#elif defined(VALGRIND_MAKE_READABLE)
    VALGRIND_DISCARD( VALGRIND_MAKE_READABLE( ptr, size ));
#endif
}
/* mark a block of memory as uninitialized for debugging purposes */
static inline void mark_block_uninitialized( void *ptr, SIZE_T size )
{
#if defined(VALGRIND_MAKE_MEM_UNDEFINED)
    VALGRIND_DISCARD( VALGRIND_MAKE_MEM_UNDEFINED( ptr, size ));
#elif defined(VALGRIND_MAKE_WRITABLE)
    VALGRIND_DISCARD( VALGRIND_MAKE_WRITABLE( ptr, size ));
#endif
}
/* mark a block of memory as a tail block */
static inline void mark_block_tail( void *ptr, SIZE_T size, DWORD flags )
{
    if (flags & HEAP_TAIL_CHECKING_ENABLED)
    {
        mark_block_uninitialized( ptr, size );
        memset( ptr, ARENA_TAIL_FILLER, size );
    }
#if defined(VALGRIND_MAKE_MEM_NOACCESS)
    VALGRIND_DISCARD( VALGRIND_MAKE_MEM_NOACCESS( ptr, size ));
#elif defined( VALGRIND_MAKE_NOACCESS)
    VALGRIND_DISCARD( VALGRIND_MAKE_NOACCESS( ptr, size ));
#endif
}
/* initialize contents of a newly created block of memory */
static inline void initialize_block( void *ptr, SIZE_T size, SIZE_T unused, DWORD flags )
{
    if (flags & HEAP_ZERO_MEMORY)
    {
        mark_block_initialized( ptr, size );
        memset( ptr, 0, size );
    }
    else
    {
        mark_block_uninitialized( ptr, size );
        if (flags & HEAP_FREE_CHECKING_ENABLED)
        {
            memset( ptr, ARENA_INUSE_FILLER, size );
            mark_block_uninitialized( ptr, size );
        }
    }

    mark_block_tail( (char *)ptr + size, unused, flags );
}
/* notify that a new block of memory has been allocated for debugging purposes */
static inline void notify_alloc( void *ptr, SIZE_T size, BOOL init )
{
#ifdef VALGRIND_MALLOCLIKE_BLOCK
    VALGRIND_MALLOCLIKE_BLOCK( ptr, size, 0, init );
#endif
}
/* notify that a block of memory has been freed for debugging purposes */
static inline void notify_free( void const *ptr )
{
#ifdef VALGRIND_FREELIKE_BLOCK
    VALGRIND_FREELIKE_BLOCK( ptr, 0 );
#endif
}
static void subheap_notify_free_all(SUBHEAP const *subheap)
{
#ifdef VALGRIND_FREELIKE_BLOCK
    char const *ptr = (char const *)subheap->base + subheap->headerSize;

    if (!RUNNING_ON_VALGRIND) return;

    while (ptr < (char const *)subheap->base + subheap->size)
    {
        if (*(const DWORD *)ptr & ARENA_FLAG_FREE)
        {
            ARENA_FREE const *pArena = (ARENA_FREE const *)ptr;
            if (pArena->magic!=ARENA_FREE_MAGIC) ERR("bad free_magic @%p\n", pArena);
            ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
        }
        else
        {
            ARENA_INUSE const *pArena = (ARENA_INUSE const *)ptr;
            if (pArena->magic == ARENA_INUSE_MAGIC) notify_free(pArena + 1);
            else if (pArena->magic != ARENA_PENDING_MAGIC) ERR("bad inuse_magic @%p\n", pArena);
            ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
        }
    }
#endif
}
/* locate a free list entry of the appropriate size */
/* size is the size of the whole block including the arena header */
static inline unsigned int get_freelist_index( SIZE_T size )
{
    unsigned int i;

    size -= sizeof(ARENA_FREE);
    for (i = 0; i < HEAP_NB_FREE_LISTS - 1; i++) if (size <= HEAP_freeListSizes[i]) break;
    return i;
}
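
/* Example (illustrative, 32-bit sizes assumed): sizeof(ARENA_FREE) is 0x10,
 * so a whole block of 0x30 bytes has 0x20 bytes left after the header and
 * lands in the 0x20 list:
 *
 *   get_freelist_index( 0x30 );    returns 1  (0x20 <= HEAP_freeListSizes[1])
 *   get_freelist_index( 0x2010 );  returns 10 (the ~0UL catch-all list)
 */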
/* get the memory protection type to use for a given heap */
static inline ULONG get_protection_type( DWORD flags )
{
    return (flags & HEAP_CREATE_ENABLE_EXECUTE) ? PAGE_EXECUTE_READWRITE : PAGE_READWRITE;
}
static RTL_CRITICAL_SECTION_DEBUG process_heap_critsect_debug =
{
    0, 0, NULL,  /* will be set later */
    { &process_heap_critsect_debug.ProcessLocksList, &process_heap_critsect_debug.ProcessLocksList },
      0, 0, { (DWORD_PTR)(__FILE__ ": main process heap section") }
};
/***********************************************************************
 *           HEAP_Dump
 */
static void HEAP_Dump( HEAP *heap )
{
    unsigned int i;
    SUBHEAP *subheap;
    char *ptr;

    DPRINTF( "Heap: %p\n", heap );
    DPRINTF( "Next: %p  Sub-heaps:", LIST_ENTRY( heap->entry.next, HEAP, entry ) );
    LIST_FOR_EACH_ENTRY( subheap, &heap->subheap_list, SUBHEAP, entry ) DPRINTF( " %p", subheap );

    DPRINTF( "\nFree lists:\n Block   Stat   Size    Id\n" );
    for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
        DPRINTF( "%p free %08lx prev=%p next=%p\n",
                 &heap->freeList[i].arena, HEAP_freeListSizes[i],
                 LIST_ENTRY( heap->freeList[i].arena.entry.prev, ARENA_FREE, entry ),
                 LIST_ENTRY( heap->freeList[i].arena.entry.next, ARENA_FREE, entry ));

    LIST_FOR_EACH_ENTRY( subheap, &heap->subheap_list, SUBHEAP, entry )
    {
        SIZE_T freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
        DPRINTF( "\n\nSub-heap %p: base=%p size=%08lx committed=%08lx\n",
                 subheap, subheap->base, subheap->size, subheap->commitSize );

        DPRINTF( "\n Block    Arena   Stat   Size    Id\n" );
        ptr = (char *)subheap->base + subheap->headerSize;
        while (ptr < (char *)subheap->base + subheap->size)
        {
            if (*(DWORD *)ptr & ARENA_FLAG_FREE)
            {
                ARENA_FREE *pArena = (ARENA_FREE *)ptr;
                DPRINTF( "%p %08x free %08x prev=%p next=%p\n",
                         pArena, pArena->magic,
                         pArena->size & ARENA_SIZE_MASK,
                         LIST_ENTRY( pArena->entry.prev, ARENA_FREE, entry ),
                         LIST_ENTRY( pArena->entry.next, ARENA_FREE, entry ) );
                ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
                arenaSize += sizeof(ARENA_FREE);
                freeSize += pArena->size & ARENA_SIZE_MASK;
            }
            else if (*(DWORD *)ptr & ARENA_FLAG_PREV_FREE)
            {
                ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
                DPRINTF( "%p %08x Used %08x back=%p\n",
                         pArena, pArena->magic, pArena->size & ARENA_SIZE_MASK, *((ARENA_FREE **)pArena - 1) );
                ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
                arenaSize += sizeof(ARENA_INUSE);
                usedSize += pArena->size & ARENA_SIZE_MASK;
            }
            else
            {
                ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
                DPRINTF( "%p %08x %s %08x\n",
                         pArena, pArena->magic, pArena->magic == ARENA_INUSE_MAGIC ? "used" : "pend",
                         pArena->size & ARENA_SIZE_MASK );
                ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
                arenaSize += sizeof(ARENA_INUSE);
                usedSize += pArena->size & ARENA_SIZE_MASK;
            }
        }
        DPRINTF( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
                 subheap->size, subheap->commitSize, freeSize, usedSize,
                 arenaSize, (arenaSize * 100) / subheap->size );
    }
}
static void HEAP_DumpEntry( LPPROCESS_HEAP_ENTRY entry )
{
    WORD rem_flags;

    TRACE( "Dumping entry %p\n", entry );
    TRACE( "lpData\t\t: %p\n", entry->lpData );
    TRACE( "cbData\t\t: %08x\n", entry->cbData );
    TRACE( "cbOverhead\t: %08x\n", entry->cbOverhead );
    TRACE( "iRegionIndex\t: %08x\n", entry->iRegionIndex );
    TRACE( "WFlags\t\t: ");
    if (entry->wFlags & PROCESS_HEAP_REGION)
        TRACE( "PROCESS_HEAP_REGION ");
    if (entry->wFlags & PROCESS_HEAP_UNCOMMITTED_RANGE)
        TRACE( "PROCESS_HEAP_UNCOMMITTED_RANGE ");
    if (entry->wFlags & PROCESS_HEAP_ENTRY_BUSY)
        TRACE( "PROCESS_HEAP_ENTRY_BUSY ");
    if (entry->wFlags & PROCESS_HEAP_ENTRY_MOVEABLE)
        TRACE( "PROCESS_HEAP_ENTRY_MOVEABLE ");
    if (entry->wFlags & PROCESS_HEAP_ENTRY_DDESHARE)
        TRACE( "PROCESS_HEAP_ENTRY_DDESHARE ");
    rem_flags = entry->wFlags &
        ~(PROCESS_HEAP_REGION | PROCESS_HEAP_UNCOMMITTED_RANGE |
          PROCESS_HEAP_ENTRY_BUSY | PROCESS_HEAP_ENTRY_MOVEABLE |
          PROCESS_HEAP_ENTRY_DDESHARE);
    if (rem_flags)
        TRACE( "Unknown %08x", rem_flags );
    TRACE( "\n");
    if ((entry->wFlags & PROCESS_HEAP_ENTRY_BUSY )
        && (entry->wFlags & PROCESS_HEAP_ENTRY_MOVEABLE))
    {
        /* Treat as block */
        TRACE( "BLOCK->hMem\t\t:%p\n", entry->u.Block.hMem );
    }
    if (entry->wFlags & PROCESS_HEAP_REGION)
    {
        TRACE( "Region.dwCommittedSize\t:%08x\n",entry->u.Region.dwCommittedSize );
        TRACE( "Region.dwUnCommittedSize\t:%08x\n",entry->u.Region.dwUnCommittedSize );
        TRACE( "Region.lpFirstBlock\t:%p\n",entry->u.Region.lpFirstBlock );
        TRACE( "Region.lpLastBlock\t:%p\n",entry->u.Region.lpLastBlock );
    }
}
430 * Pointer to the heap
433 static HEAP
*HEAP_GetPtr(
434 HANDLE heap
/* [in] Handle to the heap */
436 HEAP
*heapPtr
= heap
;
437 if (!heapPtr
|| (heapPtr
->magic
!= HEAP_MAGIC
))
439 ERR("Invalid heap %p!\n", heap
);
442 if ((heapPtr
->flags
& HEAP_VALIDATE_ALL
) && !HEAP_IsRealArena( heapPtr
, 0, NULL
, NOISY
))
446 HEAP_Dump( heapPtr
);
/***********************************************************************
 *           HEAP_InsertFreeBlock
 *
 * Insert a free block into the free list.
 */
static inline void HEAP_InsertFreeBlock( HEAP *heap, ARENA_FREE *pArena, BOOL last )
{
    FREE_LIST_ENTRY *pEntry = heap->freeList + get_freelist_index( pArena->size + sizeof(*pArena) );
    if (last)
    {
        /* insert at end of free list, i.e. before the next free list entry */
        pEntry++;
        if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS]) pEntry = heap->freeList;
        list_add_before( &pEntry->arena.entry, &pArena->entry );
    }
    else
    {
        /* insert at head of free list */
        list_add_after( &pEntry->arena.entry, &pArena->entry );
    }
    pArena->size |= ARENA_FLAG_FREE;
}
/***********************************************************************
 *           HEAP_FindSubHeap
 * Find the sub-heap containing a given address.
 *
 * RETURNS
 *  Pointer: Success
 *  NULL: Failure
 */
static SUBHEAP *HEAP_FindSubHeap(
                const HEAP *heap, /* [in] Heap pointer */
                LPCVOID ptr )     /* [in] Address */
{
    SUBHEAP *sub;
    LIST_FOR_EACH_ENTRY( sub, &heap->subheap_list, SUBHEAP, entry )
        if ((ptr >= sub->base) &&
            ((const char *)ptr < (const char *)sub->base + sub->size - sizeof(ARENA_INUSE)))
            return sub;
    return NULL;
}
/***********************************************************************
 *           HEAP_Commit
 *
 * Make sure the heap storage is committed for a given size in the specified arena.
 */
static inline BOOL HEAP_Commit( SUBHEAP *subheap, ARENA_INUSE *pArena, SIZE_T data_size )
{
    void *ptr = (char *)(pArena + 1) + data_size + sizeof(ARENA_FREE);
    SIZE_T size = (char *)ptr - (char *)subheap->base;
    size = (size + COMMIT_MASK) & ~COMMIT_MASK;
    if (size > subheap->size) size = subheap->size;
    if (size <= subheap->commitSize) return TRUE;
    size -= subheap->commitSize;
    ptr = (char *)subheap->base + subheap->commitSize;
    if (NtAllocateVirtualMemory( NtCurrentProcess(), &ptr, 0,
                                 &size, MEM_COMMIT, get_protection_type( subheap->heap->flags ) ))
    {
        WARN("Could not commit %08lx bytes at %p for heap %p\n",
             size, ptr, subheap->heap );
        return FALSE;
    }
    subheap->commitSize += size;
    return TRUE;
}
/***********************************************************************
 *           HEAP_Decommit
 *
 * If possible, decommit the heap storage from (including) 'ptr'.
 */
static inline BOOL HEAP_Decommit( SUBHEAP *subheap, void *ptr )
{
    void *addr;
    SIZE_T decommit_size;
    SIZE_T size = (char *)ptr - (char *)subheap->base;

    /* round to next block and add one full block */
    size = ((size + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
    size = max( size, subheap->min_commit );
    if (size >= subheap->commitSize) return TRUE;
    decommit_size = subheap->commitSize - size;
    addr = (char *)subheap->base + size;

    if (NtFreeVirtualMemory( NtCurrentProcess(), &addr, &decommit_size, MEM_DECOMMIT ))
    {
        WARN("Could not decommit %08lx bytes at %p for heap %p\n",
             decommit_size, (char *)subheap->base + size, subheap->heap );
        return FALSE;
    }
    subheap->commitSize -= decommit_size;
    return TRUE;
}
/***********************************************************************
 *           HEAP_CreateFreeBlock
 *
 * Create a free block at a specified address. 'size' is the size of the
 * whole block, including the new arena.
 */
static void HEAP_CreateFreeBlock( SUBHEAP *subheap, void *ptr, SIZE_T size )
{
    ARENA_FREE *pFree;
    char *pEnd;
    BOOL last;
    DWORD flags = subheap->heap->flags;

    /* Create a free arena */
    mark_block_uninitialized( ptr, sizeof(ARENA_FREE) );
    pFree = ptr;
    pFree->magic = ARENA_FREE_MAGIC;

    /* If debugging, erase the freed block content */

    pEnd = (char *)ptr + size;
    if (pEnd > (char *)subheap->base + subheap->commitSize)
        pEnd = (char *)subheap->base + subheap->commitSize;
    if (pEnd > (char *)(pFree + 1)) mark_block_free( pFree + 1, pEnd - (char *)(pFree + 1), flags );

    /* Check if next block is free also */

    if (((char *)ptr + size < (char *)subheap->base + subheap->size) &&
        (*(DWORD *)((char *)ptr + size) & ARENA_FLAG_FREE))
    {
        /* Remove the next arena from the free list */
        ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
        list_remove( &pNext->entry );
        size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
        mark_block_free( pNext, sizeof(ARENA_FREE), flags );
    }

    /* Set the next block PREV_FREE flag and pointer */

    last = ((char *)ptr + size >= (char *)subheap->base + subheap->size);
    if (!last)
    {
        DWORD *pNext = (DWORD *)((char *)ptr + size);
        *pNext |= ARENA_FLAG_PREV_FREE;
        mark_block_initialized( (ARENA_FREE **)pNext - 1, sizeof( ARENA_FREE * ) );
        *((ARENA_FREE **)pNext - 1) = pFree;
    }

    /* Last, insert the new block into the free list */

    pFree->size = size - sizeof(*pFree);
    HEAP_InsertFreeBlock( subheap->heap, pFree, last );
}
/***********************************************************************
 *           HEAP_MakeInUseBlockFree
 *
 * Turn an in-use block into a free block. Can also decommit the end of
 * the heap, and possibly even free the sub-heap altogether.
 */
static void HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena )
{
    HEAP *heap = subheap->heap;
    ARENA_FREE *pFree;
    SIZE_T size;

    if (heap->pending_free)
    {
        ARENA_INUSE *prev = heap->pending_free[heap->pending_pos];
        heap->pending_free[heap->pending_pos] = pArena;
        heap->pending_pos = (heap->pending_pos + 1) % MAX_FREE_PENDING;
        pArena->magic = ARENA_PENDING_MAGIC;
        mark_block_free( pArena + 1, pArena->size & ARENA_SIZE_MASK, heap->flags );
        if (!prev) return;
        pArena = prev;
        subheap = HEAP_FindSubHeap( heap, pArena );
    }

    /* Check if we can merge with previous block */

    size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
    if (pArena->size & ARENA_FLAG_PREV_FREE)
    {
        pFree = *((ARENA_FREE **)pArena - 1);
        size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
        /* Remove it from the free list */
        list_remove( &pFree->entry );
    }
    else pFree = (ARENA_FREE *)pArena;

    /* Create a free block */

    HEAP_CreateFreeBlock( subheap, pFree, size );
    size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
    if ((char *)pFree + size < (char *)subheap->base + subheap->size)
        return;  /* Not the last block, so nothing more to do */

    /* Free the whole sub-heap if it's empty and not the original one */

    if (((char *)pFree == (char *)subheap->base + subheap->headerSize) &&
        (subheap != &subheap->heap->subheap))
    {
        void *addr = subheap->base;

        size = 0;
        /* Remove the free block from the list */
        list_remove( &pFree->entry );
        /* Remove the subheap from the list */
        list_remove( &subheap->entry );
        /* Free the memory */
        subheap->magic = 0;
        NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
        return;
    }

    /* Decommit the end of the heap */

    if (!(subheap->heap->flags & HEAP_SHARED)) HEAP_Decommit( subheap, pFree + 1 );
}
/***********************************************************************
 *           HEAP_ShrinkBlock
 *
 * Shrink an in-use block.
 */
static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, SIZE_T size)
{
    if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_SHRINK_SIZE)
    {
        HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
                              (pArena->size & ARENA_SIZE_MASK) - size );
        /* assign size plus previous arena flags */
        pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
    }
    else
    {
        /* Turn off PREV_FREE flag in next block */
        char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
        if (pNext < (char *)subheap->base + subheap->size)
            *(DWORD *)pNext &= ~ARENA_FLAG_PREV_FREE;
    }
}
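
/* Illustrative numbers (32-bit sizes assumed, so HEAP_MIN_SHRINK_SIZE == 0x20):
 * shrinking a 0x100-byte arena to 0x40 leaves 0xc0 bytes, enough to carve out
 * a new free block; shrinking it to 0xf0 leaves only 0x10, so the arena keeps
 * its full size and the callers record the slack in unused_bytes instead. */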
/***********************************************************************
 *           allocate_large_block
 */
static void *allocate_large_block( HEAP *heap, DWORD flags, SIZE_T size )
{
    ARENA_LARGE *arena;
    SIZE_T block_size = sizeof(*arena) + ROUND_SIZE(size) + HEAP_TAIL_EXTRA_SIZE(flags);
    LPVOID address = NULL;

    if (block_size < size) return NULL;  /* overflow */
    if (NtAllocateVirtualMemory( NtCurrentProcess(), &address, 5,
                                 &block_size, MEM_COMMIT, get_protection_type( flags ) ))
    {
        WARN("Could not allocate block for %08lx bytes\n", size );
        return NULL;
    }
    arena = address;
    arena->data_size = size;
    arena->block_size = block_size;
    arena->size = ARENA_LARGE_SIZE;
    arena->magic = ARENA_LARGE_MAGIC;
    mark_block_tail( (char *)(arena + 1) + size, block_size - sizeof(*arena) - size, flags );
    list_add_tail( &heap->large_list, &arena->entry );
    notify_alloc( arena + 1, size, flags & HEAP_ZERO_MEMORY );
    return arena + 1;
}
732 static void free_large_block( HEAP
*heap
, DWORD flags
, void *ptr
)
734 ARENA_LARGE
*arena
= (ARENA_LARGE
*)ptr
- 1;
735 LPVOID address
= arena
;
738 list_remove( &arena
->entry
);
739 NtFreeVirtualMemory( NtCurrentProcess(), &address
, &size
, MEM_RELEASE
);
/***********************************************************************
 *           realloc_large_block
 */
static void *realloc_large_block( HEAP *heap, DWORD flags, void *ptr, SIZE_T size )
{
    ARENA_LARGE *arena = (ARENA_LARGE *)ptr - 1;
    void *new_ptr;

    if (arena->block_size - sizeof(*arena) >= size)
    {
        SIZE_T unused = arena->block_size - sizeof(*arena) - size;

        /* FIXME: we could remap zero-pages instead */
        if (size > arena->data_size)
            initialize_block( (char *)ptr + arena->data_size, size - arena->data_size, unused, flags );
        else
            mark_block_tail( (char *)ptr + size, unused, flags );
        arena->data_size = size;
        return ptr;
    }
    if (flags & HEAP_REALLOC_IN_PLACE_ONLY) return NULL;
    if (!(new_ptr = allocate_large_block( heap, flags, size )))
    {
        WARN("Could not allocate block for %08lx bytes\n", size );
        return NULL;
    }
    memcpy( new_ptr, ptr, arena->data_size );
    free_large_block( heap, flags, ptr );
    return new_ptr;
}
779 static ARENA_LARGE
*find_large_block( HEAP
*heap
, const void *ptr
)
783 LIST_FOR_EACH_ENTRY( arena
, &heap
->large_list
, ARENA_LARGE
, entry
)
784 if (ptr
== arena
+ 1) return arena
;
/***********************************************************************
 *           validate_large_arena
 */
static BOOL validate_large_arena( HEAP *heap, const ARENA_LARGE *arena, BOOL quiet )
{
    DWORD flags = heap->flags;

    if ((ULONG_PTR)arena % getpagesize())
    {
        if (quiet == NOISY)
        {
            ERR( "Heap %p: invalid large arena pointer %p\n", heap, arena );
            if (TRACE_ON(heap)) HEAP_Dump( heap );
        }
        else if (WARN_ON(heap))
        {
            WARN( "Heap %p: unaligned arena pointer %p\n", heap, arena );
            if (TRACE_ON(heap)) HEAP_Dump( heap );
        }
        return FALSE;
    }
    if (arena->size != ARENA_LARGE_SIZE || arena->magic != ARENA_LARGE_MAGIC)
    {
        if (quiet == NOISY)
        {
            ERR( "Heap %p: invalid large arena %p values %x/%x\n",
                 heap, arena, arena->size, arena->magic );
            if (TRACE_ON(heap)) HEAP_Dump( heap );
        }
        else if (WARN_ON(heap))
        {
            WARN( "Heap %p: invalid large arena %p values %x/%x\n",
                  heap, arena, arena->size, arena->magic );
            if (TRACE_ON(heap)) HEAP_Dump( heap );
        }
        return FALSE;
    }
    if (arena->data_size > arena->block_size - sizeof(*arena))
    {
        ERR( "Heap %p: invalid large arena %p size %lx/%lx\n",
             heap, arena, arena->data_size, arena->block_size );
        return FALSE;
    }
    if (flags & HEAP_TAIL_CHECKING_ENABLED)
    {
        SIZE_T i, unused = arena->block_size - sizeof(*arena) - arena->data_size;
        const unsigned char *data = (const unsigned char *)(arena + 1) + arena->data_size;

        for (i = 0; i < unused; i++)
        {
            if (data[i] == ARENA_TAIL_FILLER) continue;
            ERR("Heap %p: block %p tail overwritten at %p (byte %lu/%lu == 0x%02x)\n",
                heap, arena + 1, data + i, i, unused, data[i] );
            return FALSE;
        }
    }
    return TRUE;
}
853 static SUBHEAP
*HEAP_CreateSubHeap( HEAP
*heap
, LPVOID address
, DWORD flags
,
854 SIZE_T commitSize
, SIZE_T totalSize
)
857 FREE_LIST_ENTRY
*pEntry
;
862 if (!commitSize
) commitSize
= COMMIT_MASK
+ 1;
863 totalSize
= min( totalSize
, 0xffff0000 ); /* don't allow a heap larger than 4Gb */
864 if (totalSize
< commitSize
) totalSize
= commitSize
;
865 if (flags
& HEAP_SHARED
) commitSize
= totalSize
; /* always commit everything in a shared heap */
866 commitSize
= min( totalSize
, (commitSize
+ COMMIT_MASK
) & ~COMMIT_MASK
);
868 /* allocate the memory block */
869 if (NtAllocateVirtualMemory( NtCurrentProcess(), &address
, 5, &totalSize
,
870 MEM_RESERVE
, get_protection_type( flags
) ))
872 WARN("Could not allocate %08lx bytes\n", totalSize
);
875 if (NtAllocateVirtualMemory( NtCurrentProcess(), &address
, 0,
876 &commitSize
, MEM_COMMIT
, get_protection_type( flags
) ))
878 WARN("Could not commit %08lx bytes for sub-heap %p\n", commitSize
, address
);
885 /* If this is a secondary subheap, insert it into list */
888 subheap
->base
= address
;
889 subheap
->heap
= heap
;
890 subheap
->size
= totalSize
;
891 subheap
->min_commit
= 0x10000;
892 subheap
->commitSize
= commitSize
;
893 subheap
->magic
= SUBHEAP_MAGIC
;
894 subheap
->headerSize
= ROUND_SIZE( sizeof(SUBHEAP
) );
895 list_add_head( &heap
->subheap_list
, &subheap
->entry
);
899 /* If this is a primary subheap, initialize main heap */
903 heap
->magic
= HEAP_MAGIC
;
904 heap
->grow_size
= max( HEAP_DEF_SIZE
, totalSize
);
905 list_init( &heap
->subheap_list
);
906 list_init( &heap
->large_list
);
908 subheap
= &heap
->subheap
;
909 subheap
->base
= address
;
910 subheap
->heap
= heap
;
911 subheap
->size
= totalSize
;
912 subheap
->min_commit
= commitSize
;
913 subheap
->commitSize
= commitSize
;
914 subheap
->magic
= SUBHEAP_MAGIC
;
915 subheap
->headerSize
= ROUND_SIZE( sizeof(HEAP
) );
916 list_add_head( &heap
->subheap_list
, &subheap
->entry
);
918 /* Build the free lists */
920 heap
->freeList
= (FREE_LIST_ENTRY
*)((char *)heap
+ subheap
->headerSize
);
921 subheap
->headerSize
+= HEAP_NB_FREE_LISTS
* sizeof(FREE_LIST_ENTRY
);
922 list_init( &heap
->freeList
[0].arena
.entry
);
923 for (i
= 0, pEntry
= heap
->freeList
; i
< HEAP_NB_FREE_LISTS
; i
++, pEntry
++)
925 pEntry
->arena
.size
= 0 | ARENA_FLAG_FREE
;
926 pEntry
->arena
.magic
= ARENA_FREE_MAGIC
;
927 if (i
) list_add_after( &pEntry
[-1].arena
.entry
, &pEntry
->arena
.entry
);
930 /* Initialize critical section */
932 if (!processHeap
) /* do it by hand to avoid memory allocations */
934 heap
->critSection
.DebugInfo
= &process_heap_critsect_debug
;
935 heap
->critSection
.LockCount
= -1;
936 heap
->critSection
.RecursionCount
= 0;
937 heap
->critSection
.OwningThread
= 0;
938 heap
->critSection
.LockSemaphore
= 0;
939 heap
->critSection
.SpinCount
= 0;
940 process_heap_critsect_debug
.CriticalSection
= &heap
->critSection
;
944 RtlInitializeCriticalSection( &heap
->critSection
);
945 heap
->critSection
.DebugInfo
->Spare
[0] = (DWORD_PTR
)(__FILE__
": HEAP.critSection");
948 if (flags
& HEAP_SHARED
)
950 /* let's assume that only one thread at a time will try to do this */
951 HANDLE sem
= heap
->critSection
.LockSemaphore
;
952 if (!sem
) NtCreateSemaphore( &sem
, SEMAPHORE_ALL_ACCESS
, NULL
, 0, 1 );
954 NtDuplicateObject( NtCurrentProcess(), sem
, NtCurrentProcess(), &sem
, 0, 0,
955 DUP_HANDLE_MAKE_GLOBAL
| DUP_HANDLE_SAME_ACCESS
| DUP_HANDLE_CLOSE_SOURCE
);
956 heap
->critSection
.LockSemaphore
= sem
;
957 RtlFreeHeap( processHeap
, 0, heap
->critSection
.DebugInfo
);
958 heap
->critSection
.DebugInfo
= NULL
;
962 /* Create the first free block */
964 HEAP_CreateFreeBlock( subheap
, (LPBYTE
)subheap
->base
+ subheap
->headerSize
,
965 subheap
->size
- subheap
->headerSize
);
971 /***********************************************************************
974 * Find a free block at least as large as the requested size, and make sure
975 * the requested size is committed.
977 static ARENA_FREE
*HEAP_FindFreeBlock( HEAP
*heap
, SIZE_T size
,
978 SUBHEAP
**ppSubHeap
)
983 FREE_LIST_ENTRY
*pEntry
= heap
->freeList
+ get_freelist_index( size
+ sizeof(ARENA_INUSE
) );
985 /* Find a suitable free list, and in it find a block large enough */
987 ptr
= &pEntry
->arena
.entry
;
988 while ((ptr
= list_next( &heap
->freeList
[0].arena
.entry
, ptr
)))
990 ARENA_FREE
*pArena
= LIST_ENTRY( ptr
, ARENA_FREE
, entry
);
991 SIZE_T arena_size
= (pArena
->size
& ARENA_SIZE_MASK
) +
992 sizeof(ARENA_FREE
) - sizeof(ARENA_INUSE
);
993 if (arena_size
>= size
)
995 subheap
= HEAP_FindSubHeap( heap
, pArena
);
996 if (!HEAP_Commit( subheap
, (ARENA_INUSE
*)pArena
, size
)) return NULL
;
997 *ppSubHeap
= subheap
;
1002 /* If no block was found, attempt to grow the heap */
1004 if (!(heap
->flags
& HEAP_GROWABLE
))
1006 WARN("Not enough space in heap %p for %08lx bytes\n", heap
, size
);
1009 /* make sure that we have a big enough size *committed* to fit another
1010 * last free arena in !
1011 * So just one heap struct, one first free arena which will eventually
1012 * get used, and a second free arena that might get assigned all remaining
1013 * free space in HEAP_ShrinkBlock() */
1014 total_size
= size
+ ROUND_SIZE(sizeof(SUBHEAP
)) + sizeof(ARENA_INUSE
) + sizeof(ARENA_FREE
);
1015 if (total_size
< size
) return NULL
; /* overflow */
1017 if ((subheap
= HEAP_CreateSubHeap( heap
, NULL
, heap
->flags
, total_size
,
1018 max( heap
->grow_size
, total_size
) )))
1020 if (heap
->grow_size
< 128 * 1024 * 1024) heap
->grow_size
*= 2;
1022 else while (!subheap
) /* shrink the grow size again if we are running out of space */
1024 if (heap
->grow_size
<= total_size
|| heap
->grow_size
<= 4 * 1024 * 1024) return NULL
;
1025 heap
->grow_size
/= 2;
1026 subheap
= HEAP_CreateSubHeap( heap
, NULL
, heap
->flags
, total_size
,
1027 max( heap
->grow_size
, total_size
) );
1030 TRACE("created new sub-heap %p of %08lx bytes for heap %p\n",
1031 subheap
, subheap
->size
, heap
);
1033 *ppSubHeap
= subheap
;
1034 return (ARENA_FREE
*)((char *)subheap
->base
+ subheap
->headerSize
);
/***********************************************************************
 *           HEAP_IsValidArenaPtr
 *
 * Check that the pointer is inside the range possible for arenas.
 */
static BOOL HEAP_IsValidArenaPtr( const HEAP *heap, const ARENA_FREE *ptr )
{
    unsigned int i;
    const SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
    if (!subheap) return FALSE;
    if ((const char *)ptr >= (const char *)subheap->base + subheap->headerSize) return TRUE;
    if (subheap != &heap->subheap) return FALSE;
    for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
        if (ptr == &heap->freeList[i].arena) return TRUE;
    return FALSE;
}
/***********************************************************************
 *           HEAP_ValidateFreeArena
 */
static BOOL HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
{
    DWORD flags = subheap->heap->flags;
    SIZE_T size;
    ARENA_FREE *prev, *next;
    char *heapEnd = (char *)subheap->base + subheap->size;

    /* Check for unaligned pointers */
    if ((ULONG_PTR)pArena % ALIGNMENT != ARENA_OFFSET)
    {
        ERR("Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
        return FALSE;
    }

    /* Check magic number */
    if (pArena->magic != ARENA_FREE_MAGIC)
    {
        ERR("Heap %p: invalid free arena magic %08x for %p\n", subheap->heap, pArena->magic, pArena );
        return FALSE;
    }
    /* Check size flags */
    if (!(pArena->size & ARENA_FLAG_FREE) ||
        (pArena->size & ARENA_FLAG_PREV_FREE))
    {
        ERR("Heap %p: bad flags %08x for free arena %p\n",
            subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena );
        return FALSE;
    }
    /* Check arena size */
    size = pArena->size & ARENA_SIZE_MASK;
    if ((char *)(pArena + 1) + size > heapEnd)
    {
        ERR("Heap %p: bad size %08lx for free arena %p\n", subheap->heap, size, pArena );
        return FALSE;
    }
    /* Check that next pointer is valid */
    next = LIST_ENTRY( pArena->entry.next, ARENA_FREE, entry );
    if (!HEAP_IsValidArenaPtr( subheap->heap, next ))
    {
        ERR("Heap %p: bad next ptr %p for arena %p\n",
            subheap->heap, next, pArena );
        return FALSE;
    }
    /* Check that next arena is free */
    if (!(next->size & ARENA_FLAG_FREE) || (next->magic != ARENA_FREE_MAGIC))
    {
        ERR("Heap %p: next arena %p invalid for %p\n",
            subheap->heap, next, pArena );
        return FALSE;
    }
    /* Check that prev pointer is valid */
    prev = LIST_ENTRY( pArena->entry.prev, ARENA_FREE, entry );
    if (!HEAP_IsValidArenaPtr( subheap->heap, prev ))
    {
        ERR("Heap %p: bad prev ptr %p for arena %p\n",
            subheap->heap, prev, pArena );
        return FALSE;
    }
    /* Check that prev arena is free */
    if (!(prev->size & ARENA_FLAG_FREE) || (prev->magic != ARENA_FREE_MAGIC))
    {
        /* this often means that the prev arena got overwritten
         * by a memory write before that prev arena */
        ERR("Heap %p: prev arena %p invalid for %p\n",
            subheap->heap, prev, pArena );
        return FALSE;
    }
    /* Check that next block has PREV_FREE flag */
    if ((char *)(pArena + 1) + size < heapEnd)
    {
        if (!(*(DWORD *)((char *)(pArena + 1) + size) & ARENA_FLAG_PREV_FREE))
        {
            ERR("Heap %p: free arena %p next block has no PREV_FREE flag\n",
                subheap->heap, pArena );
            return FALSE;
        }
        /* Check next block back pointer */
        if (*((ARENA_FREE **)((char *)(pArena + 1) + size) - 1) != pArena)
        {
            ERR("Heap %p: arena %p has wrong back ptr %p\n",
                subheap->heap, pArena,
                *((ARENA_FREE **)((char *)(pArena+1) + size) - 1));
            return FALSE;
        }
    }
    if (flags & HEAP_FREE_CHECKING_ENABLED)
    {
        DWORD *ptr = (DWORD *)(pArena + 1);
        char *end = (char *)(pArena + 1) + size;

        if (end >= heapEnd) end = (char *)subheap->base + subheap->commitSize;
        else end -= sizeof(ARENA_FREE *);
        while (ptr < (DWORD *)end)
        {
            if (*ptr != ARENA_FREE_FILLER)
            {
                ERR("Heap %p: free block %p overwritten at %p by %08x\n",
                    subheap->heap, (ARENA_INUSE *)pArena + 1, ptr, *ptr );
                return FALSE;
            }
            ptr++;
        }
    }
    return TRUE;
}
/***********************************************************************
 *           HEAP_ValidateInUseArena
 */
static BOOL HEAP_ValidateInUseArena( const SUBHEAP *subheap, const ARENA_INUSE *pArena, BOOL quiet )
{
    SIZE_T size;
    DWORD i, flags = subheap->heap->flags;
    const char *heapEnd = (const char *)subheap->base + subheap->size;

    /* Check for unaligned pointers */
    if ((ULONG_PTR)pArena % ALIGNMENT != ARENA_OFFSET)
    {
        if ( quiet == NOISY )
        {
            ERR( "Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
            if ( TRACE_ON(heap) )
                HEAP_Dump( subheap->heap );
        }
        else if ( WARN_ON(heap) )
        {
            WARN( "Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
            if ( TRACE_ON(heap) )
                HEAP_Dump( subheap->heap );
        }
        return FALSE;
    }

    /* Check magic number */
    if (pArena->magic != ARENA_INUSE_MAGIC && pArena->magic != ARENA_PENDING_MAGIC)
    {
        if (quiet == NOISY) {
            ERR("Heap %p: invalid in-use arena magic %08x for %p\n", subheap->heap, pArena->magic, pArena );
            if (TRACE_ON(heap))
                HEAP_Dump( subheap->heap );
        }  else if (WARN_ON(heap)) {
            WARN("Heap %p: invalid in-use arena magic %08x for %p\n", subheap->heap, pArena->magic, pArena );
            if (TRACE_ON(heap))
                HEAP_Dump( subheap->heap );
        }
        return FALSE;
    }
    /* Check size flags */
    if (pArena->size & ARENA_FLAG_FREE)
    {
        ERR("Heap %p: bad flags %08x for in-use arena %p\n",
            subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena );
        return FALSE;
    }
    /* Check arena size */
    size = pArena->size & ARENA_SIZE_MASK;
    if ((const char *)(pArena + 1) + size > heapEnd ||
        (const char *)(pArena + 1) + size < (const char *)(pArena + 1))
    {
        ERR("Heap %p: bad size %08lx for in-use arena %p\n", subheap->heap, size, pArena );
        return FALSE;
    }
    /* Check next arena PREV_FREE flag */
    if (((const char *)(pArena + 1) + size < heapEnd) &&
        (*(const DWORD *)((const char *)(pArena + 1) + size) & ARENA_FLAG_PREV_FREE))
    {
        ERR("Heap %p: in-use arena %p next block %p has PREV_FREE flag %x\n",
            subheap->heap, pArena, (const char *)(pArena + 1) + size,*(const DWORD *)((const char *)(pArena + 1) + size) );
        return FALSE;
    }
    /* Check prev free arena */
    if (pArena->size & ARENA_FLAG_PREV_FREE)
    {
        const ARENA_FREE *pPrev = *((const ARENA_FREE * const*)pArena - 1);
        /* Check prev pointer */
        if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
        {
            ERR("Heap %p: bad back ptr %p for arena %p\n",
                subheap->heap, pPrev, pArena );
            return FALSE;
        }
        /* Check that prev arena is free */
        if (!(pPrev->size & ARENA_FLAG_FREE) ||
            (pPrev->magic != ARENA_FREE_MAGIC))
        {
            ERR("Heap %p: prev arena %p invalid for in-use %p\n",
                subheap->heap, pPrev, pArena );
            return FALSE;
        }
        /* Check that prev arena is really the previous block */
        if ((const char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (const char *)pArena)
        {
            ERR("Heap %p: prev arena %p is not prev for in-use %p\n",
                subheap->heap, pPrev, pArena );
            return FALSE;
        }
    }
    /* Check unused size */
    if (pArena->unused_bytes > size)
    {
        ERR("Heap %p: invalid unused size %08x/%08lx\n", subheap->heap, pArena->unused_bytes, size );
        return FALSE;
    }
    /* Check unused bytes */
    if (pArena->magic == ARENA_PENDING_MAGIC)
    {
        const DWORD *ptr = (const DWORD *)(pArena + 1);
        const DWORD *end = (const DWORD *)((const char *)ptr + size);

        while (ptr < end)
        {
            if (*ptr != ARENA_FREE_FILLER)
            {
                ERR("Heap %p: free block %p overwritten at %p by %08x\n",
                    subheap->heap, (const ARENA_INUSE *)pArena + 1, ptr, *ptr );
                if (!*ptr) { HEAP_Dump( subheap->heap ); DbgBreakPoint(); }
                return FALSE;
            }
            ptr++;
        }
    }
    else if (flags & HEAP_TAIL_CHECKING_ENABLED)
    {
        const unsigned char *data = (const unsigned char *)(pArena + 1) + size - pArena->unused_bytes;

        for (i = 0; i < pArena->unused_bytes; i++)
        {
            if (data[i] == ARENA_TAIL_FILLER) continue;
            ERR("Heap %p: block %p tail overwritten at %p (byte %u/%u == 0x%02x)\n",
                subheap->heap, pArena + 1, data + i, i, pArena->unused_bytes, data[i] );
            return FALSE;
        }
    }
    return TRUE;
}
1298 * HEAP_IsRealArena [Internal]
1299 * Validates a block is a valid arena.
1305 static BOOL
HEAP_IsRealArena( HEAP
*heapPtr
, /* [in] ptr to the heap */
1306 DWORD flags
, /* [in] Bit flags that control access during operation */
1307 LPCVOID block
, /* [in] Optional pointer to memory block to validate */
1308 BOOL quiet
) /* [in] Flag - if true, HEAP_ValidateInUseArena
1309 * does not complain */
1313 const ARENA_LARGE
*large_arena
;
1315 flags
&= HEAP_NO_SERIALIZE
;
1316 flags
|= heapPtr
->flags
;
1317 /* calling HeapLock may result in infinite recursion, so do the critsect directly */
1318 if (!(flags
& HEAP_NO_SERIALIZE
))
1319 RtlEnterCriticalSection( &heapPtr
->critSection
);
1321 if (block
) /* only check this single memory block */
1323 const ARENA_INUSE
*arena
= (const ARENA_INUSE
*)block
- 1;
1325 if (!(subheap
= HEAP_FindSubHeap( heapPtr
, arena
)) ||
1326 ((const char *)arena
< (char *)subheap
->base
+ subheap
->headerSize
))
1328 if (!(large_arena
= find_large_block( heapPtr
, block
)))
1331 ERR("Heap %p: block %p is not inside heap\n", heapPtr
, block
);
1332 else if (WARN_ON(heap
))
1333 WARN("Heap %p: block %p is not inside heap\n", heapPtr
, block
);
1337 ret
= validate_large_arena( heapPtr
, large_arena
, quiet
);
1339 ret
= HEAP_ValidateInUseArena( subheap
, arena
, quiet
);
1341 if (!(flags
& HEAP_NO_SERIALIZE
))
1342 RtlLeaveCriticalSection( &heapPtr
->critSection
);
1346 LIST_FOR_EACH_ENTRY( subheap
, &heapPtr
->subheap_list
, SUBHEAP
, entry
)
1348 char *ptr
= (char *)subheap
->base
+ subheap
->headerSize
;
1349 while (ptr
< (char *)subheap
->base
+ subheap
->size
)
1351 if (*(DWORD
*)ptr
& ARENA_FLAG_FREE
)
1353 if (!HEAP_ValidateFreeArena( subheap
, (ARENA_FREE
*)ptr
)) {
1357 ptr
+= sizeof(ARENA_FREE
) + (*(DWORD
*)ptr
& ARENA_SIZE_MASK
);
1361 if (!HEAP_ValidateInUseArena( subheap
, (ARENA_INUSE
*)ptr
, NOISY
)) {
1365 ptr
+= sizeof(ARENA_INUSE
) + (*(DWORD
*)ptr
& ARENA_SIZE_MASK
);
1371 LIST_FOR_EACH_ENTRY( large_arena
, &heapPtr
->large_list
, ARENA_LARGE
, entry
)
1372 if (!(ret
= validate_large_arena( heapPtr
, large_arena
, quiet
))) break;
1374 if (!(flags
& HEAP_NO_SERIALIZE
)) RtlLeaveCriticalSection( &heapPtr
->critSection
);
1379 /***********************************************************************
1380 * validate_block_pointer
1382 * Minimum validation needed to catch bad parameters in heap functions.
1384 static BOOL
validate_block_pointer( HEAP
*heap
, SUBHEAP
**ret_subheap
, const ARENA_INUSE
*arena
)
1389 if (!(*ret_subheap
= subheap
= HEAP_FindSubHeap( heap
, arena
)))
1391 ARENA_LARGE
*large_arena
= find_large_block( heap
, arena
+ 1 );
1395 WARN( "Heap %p: pointer %p is not inside heap\n", heap
, arena
+ 1 );
1398 if ((heap
->flags
& HEAP_VALIDATE
) && !validate_large_arena( heap
, large_arena
, QUIET
))
1403 if ((const char *)arena
< (char *)subheap
->base
+ subheap
->headerSize
)
1404 WARN( "Heap %p: pointer %p is inside subheap %p header\n", subheap
->heap
, arena
+ 1, subheap
);
1405 else if (subheap
->heap
->flags
& HEAP_VALIDATE
) /* do the full validation */
1406 ret
= HEAP_ValidateInUseArena( subheap
, arena
, QUIET
);
1407 else if ((ULONG_PTR
)arena
% ALIGNMENT
!= ARENA_OFFSET
)
1408 WARN( "Heap %p: unaligned arena pointer %p\n", subheap
->heap
, arena
);
1409 else if (arena
->magic
== ARENA_PENDING_MAGIC
)
1410 WARN( "Heap %p: block %p used after free\n", subheap
->heap
, arena
+ 1 );
1411 else if (arena
->magic
!= ARENA_INUSE_MAGIC
)
1412 WARN( "Heap %p: invalid in-use arena magic %08x for %p\n", subheap
->heap
, arena
->magic
, arena
);
1413 else if (arena
->size
& ARENA_FLAG_FREE
)
1414 ERR( "Heap %p: bad flags %08x for in-use arena %p\n",
1415 subheap
->heap
, arena
->size
& ~ARENA_SIZE_MASK
, arena
);
1416 else if ((const char *)(arena
+ 1) + (arena
->size
& ARENA_SIZE_MASK
) > (const char *)subheap
->base
+ subheap
->size
||
1417 (const char *)(arena
+ 1) + (arena
->size
& ARENA_SIZE_MASK
) < (const char *)(arena
+ 1))
1418 ERR( "Heap %p: bad size %08x for in-use arena %p\n",
1419 subheap
->heap
, arena
->size
& ARENA_SIZE_MASK
, arena
);
1427 /***********************************************************************
1428 * heap_set_debug_flags
1430 void heap_set_debug_flags( HANDLE handle
)
1432 HEAP
*heap
= HEAP_GetPtr( handle
);
1433 ULONG global_flags
= RtlGetNtGlobalFlags();
1436 if (TRACE_ON(heap
)) global_flags
|= FLG_HEAP_VALIDATE_ALL
;
1437 if (WARN_ON(heap
)) global_flags
|= FLG_HEAP_VALIDATE_PARAMETERS
;
1439 if (global_flags
& FLG_HEAP_ENABLE_TAIL_CHECK
) flags
|= HEAP_TAIL_CHECKING_ENABLED
;
1440 if (global_flags
& FLG_HEAP_ENABLE_FREE_CHECK
) flags
|= HEAP_FREE_CHECKING_ENABLED
;
1441 if (global_flags
& FLG_HEAP_DISABLE_COALESCING
) flags
|= HEAP_DISABLE_COALESCE_ON_FREE
;
1442 if (global_flags
& FLG_HEAP_PAGE_ALLOCS
) flags
|= HEAP_PAGE_ALLOCS
| HEAP_GROWABLE
;
1444 if (global_flags
& FLG_HEAP_VALIDATE_PARAMETERS
)
1445 flags
|= HEAP_VALIDATE
| HEAP_VALIDATE_PARAMS
|
1446 HEAP_TAIL_CHECKING_ENABLED
| HEAP_FREE_CHECKING_ENABLED
;
1447 if (global_flags
& FLG_HEAP_VALIDATE_ALL
)
1448 flags
|= HEAP_VALIDATE
| HEAP_VALIDATE_ALL
|
1449 HEAP_TAIL_CHECKING_ENABLED
| HEAP_FREE_CHECKING_ENABLED
;
1451 if (RUNNING_ON_VALGRIND
) flags
= 0; /* no sense in validating since Valgrind catches accesses */
1453 heap
->flags
|= flags
;
1454 heap
->force_flags
|= flags
& ~(HEAP_VALIDATE
| HEAP_DISABLE_COALESCE_ON_FREE
);
1456 if (flags
& (HEAP_FREE_CHECKING_ENABLED
| HEAP_TAIL_CHECKING_ENABLED
)) /* fix existing blocks */
1461 LIST_FOR_EACH_ENTRY( subheap
, &heap
->subheap_list
, SUBHEAP
, entry
)
1463 char *ptr
= (char *)subheap
->base
+ subheap
->headerSize
;
1464 char *end
= (char *)subheap
->base
+ subheap
->commitSize
;
1467 ARENA_INUSE
*arena
= (ARENA_INUSE
*)ptr
;
1468 SIZE_T size
= arena
->size
& ARENA_SIZE_MASK
;
1469 if (arena
->size
& ARENA_FLAG_FREE
)
1471 SIZE_T count
= size
;
1473 ptr
+= sizeof(ARENA_FREE
) + size
;
1474 if (ptr
>= end
) count
= end
- (char *)((ARENA_FREE
*)arena
+ 1);
1475 else count
-= sizeof(ARENA_FREE
*);
1476 mark_block_free( (ARENA_FREE
*)arena
+ 1, count
, flags
);
1480 if (arena
->magic
== ARENA_PENDING_MAGIC
)
1481 mark_block_free( arena
+ 1, size
, flags
);
1483 mark_block_tail( (char *)(arena
+ 1) + size
- arena
->unused_bytes
,
1484 arena
->unused_bytes
, flags
);
1485 ptr
+= sizeof(ARENA_INUSE
) + size
;
1490 LIST_FOR_EACH_ENTRY( large
, &heap
->large_list
, ARENA_LARGE
, entry
)
1491 mark_block_tail( (char *)(large
+ 1) + large
->data_size
,
1492 large
->block_size
- sizeof(*large
) - large
->data_size
, flags
);
1495 if ((heap
->flags
& HEAP_GROWABLE
) && !heap
->pending_free
&&
1496 ((flags
& HEAP_FREE_CHECKING_ENABLED
) || RUNNING_ON_VALGRIND
))
1499 SIZE_T size
= MAX_FREE_PENDING
* sizeof(*heap
->pending_free
);
1501 if (!NtAllocateVirtualMemory( NtCurrentProcess(), &ptr
, 4, &size
, MEM_COMMIT
, PAGE_READWRITE
))
1503 heap
->pending_free
= ptr
;
1504 heap
->pending_pos
= 0;
1510 /***********************************************************************
1511 * RtlCreateHeap (NTDLL.@)
1513 * Create a new Heap.
1516 * flags [I] HEAP_ flags from "winnt.h"
1517 * addr [I] Desired base address
1518 * totalSize [I] Total size of the heap, or 0 for a growable heap
1519 * commitSize [I] Amount of heap space to commit
1520 * unknown [I] Not yet understood
1521 * definition [I] Heap definition
1524 * Success: A HANDLE to the newly created heap.
1525 * Failure: a NULL HANDLE.
1527 HANDLE WINAPI
RtlCreateHeap( ULONG flags
, PVOID addr
, SIZE_T totalSize
, SIZE_T commitSize
,
1528 PVOID unknown
, PRTL_HEAP_DEFINITION definition
)
1532 /* Allocate the heap block */
1536 totalSize
= HEAP_DEF_SIZE
;
1537 flags
|= HEAP_GROWABLE
;
1540 if (!(subheap
= HEAP_CreateSubHeap( NULL
, addr
, flags
, commitSize
, totalSize
))) return 0;
1542 heap_set_debug_flags( subheap
->heap
);
1544 /* link it into the per-process heap list */
1547 HEAP
*heapPtr
= subheap
->heap
;
1548 RtlEnterCriticalSection( &processHeap
->critSection
);
1549 list_add_head( &processHeap
->entry
, &heapPtr
->entry
);
1550 RtlLeaveCriticalSection( &processHeap
->critSection
);
1554 processHeap
= subheap
->heap
; /* assume the first heap we create is the process main heap */
1555 list_init( &processHeap
->entry
);
1558 return subheap
->heap
;
/***********************************************************************
 *           RtlDestroyHeap   (NTDLL.@)
 *
 * Destroy a Heap created with RtlCreateHeap().
 *
 * PARAMS
 *  heap [I] Heap to destroy.
 *
 * RETURNS
 *  Success: A NULL HANDLE, if heap is NULL or it was destroyed
 *  Failure: The Heap handle, if heap is the process heap.
 */
HANDLE WINAPI RtlDestroyHeap( HANDLE heap )
{
    HEAP *heapPtr = HEAP_GetPtr( heap );
    SUBHEAP *subheap, *next;
    ARENA_LARGE *arena, *arena_next;
    SIZE_T size;
    void *addr;

    TRACE("%p\n", heap );
    if (!heapPtr) return heap;

    if (heap == processHeap) return heap; /* cannot delete the main process heap */

    /* remove it from the per-process list */
    RtlEnterCriticalSection( &processHeap->critSection );
    list_remove( &heapPtr->entry );
    RtlLeaveCriticalSection( &processHeap->critSection );

    heapPtr->critSection.DebugInfo->Spare[0] = 0;
    RtlDeleteCriticalSection( &heapPtr->critSection );

    LIST_FOR_EACH_ENTRY_SAFE( arena, arena_next, &heapPtr->large_list, ARENA_LARGE, entry )
    {
        list_remove( &arena->entry );
        size = 0;
        addr = arena;
        NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
    }
    LIST_FOR_EACH_ENTRY_SAFE( subheap, next, &heapPtr->subheap_list, SUBHEAP, entry )
    {
        if (subheap == &heapPtr->subheap) continue;  /* do this one last */
        subheap_notify_free_all(subheap);
        list_remove( &subheap->entry );
        size = 0;
        addr = subheap->base;
        NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
    }
    subheap_notify_free_all(&heapPtr->subheap);
    if (heapPtr->pending_free)
    {
        size = 0;
        addr = heapPtr->pending_free;
        NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
    }
    size = 0;
    addr = heapPtr->subheap.base;
    NtFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
    return 0;
}
/***********************************************************************
 *           RtlAllocateHeap   (NTDLL.@)
 *
 * Allocate a memory block from a Heap.
 *
 * PARAMS
 *  heap  [I] Heap to allocate block from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  size  [I] Size of the memory block to allocate
 *
 * RETURNS
 *  Success: A pointer to the newly allocated block
 *  Failure: NULL.
 *
 * NOTES
 *  This call does not SetLastError().
 */
PVOID WINAPI RtlAllocateHeap( HANDLE heap, ULONG flags, SIZE_T size )
{
    ARENA_FREE *pArena;
    ARENA_INUSE *pInUse;
    SUBHEAP *subheap;
    HEAP *heapPtr = HEAP_GetPtr( heap );
    SIZE_T rounded_size;

    /* Validate the parameters */

    if (!heapPtr) return NULL;
    flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
    flags |= heapPtr->flags;
    rounded_size = ROUND_SIZE(size) + HEAP_TAIL_EXTRA_SIZE( flags );
    if (rounded_size < size)  /* overflow */
    {
        if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
        return NULL;
    }
    if (rounded_size < HEAP_MIN_DATA_SIZE) rounded_size = HEAP_MIN_DATA_SIZE;

    if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );

    if (rounded_size >= HEAP_MIN_LARGE_BLOCK_SIZE && (flags & HEAP_GROWABLE))
    {
        void *ret = allocate_large_block( heapPtr, flags, size );
        if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
        if (!ret && (flags & HEAP_GENERATE_EXCEPTIONS)) RtlRaiseStatus( STATUS_NO_MEMORY );
        TRACE("(%p,%08x,%08lx): returning %p\n", heap, flags, size, ret );
        return ret;
    }

    /* Locate a suitable free block */

    if (!(pArena = HEAP_FindFreeBlock( heapPtr, rounded_size, &subheap )))
    {
        TRACE("(%p,%08x,%08lx): returning NULL\n",
              heap, flags, size );
        if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
        if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
        return NULL;
    }

    /* Remove the arena from the free list */

    list_remove( &pArena->entry );

    /* Build the in-use arena */

    pInUse = (ARENA_INUSE *)pArena;

    /* in-use arena is smaller than free arena,
     * so we have to add the difference to the size */
    pInUse->size  = (pInUse->size & ~ARENA_FLAG_FREE) + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
    pInUse->magic = ARENA_INUSE_MAGIC;

    /* Shrink the block */

    HEAP_ShrinkBlock( subheap, pInUse, rounded_size );
    pInUse->unused_bytes = (pInUse->size & ARENA_SIZE_MASK) - size;

    notify_alloc( pInUse + 1, size, flags & HEAP_ZERO_MEMORY );
    initialize_block( pInUse + 1, size, pInUse->unused_bytes, flags );

    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );

    TRACE("(%p,%08x,%08lx): returning %p\n", heap, flags, size, pInUse + 1 );
    return pInUse + 1;
}
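
/* Typical caller-side sequence (hypothetical sketch, not part of this file):
 *
 *   HANDLE h = RtlCreateHeap( HEAP_GROWABLE, NULL, 0, 0, NULL, NULL );
 *   void *p  = RtlAllocateHeap( h, HEAP_ZERO_MEMORY, 64 );
 *   p = RtlReAllocateHeap( h, 0, p, 128 );
 *   RtlFreeHeap( h, 0, p );
 *   RtlDestroyHeap( h );
 */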
/***********************************************************************
 *           RtlFreeHeap   (NTDLL.@)
 *
 * Free a memory block allocated with RtlAllocateHeap().
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  ptr   [I] Block to free
 *
 * RETURNS
 *  Success: TRUE, if ptr is NULL or was freed successfully.
 *  Failure: FALSE.
 */
BOOLEAN WINAPI RtlFreeHeap( HANDLE heap, ULONG flags, PVOID ptr )
{
    ARENA_INUSE *pInUse;
    SUBHEAP *subheap;
    HEAP *heapPtr;

    /* Validate the parameters */

    if (!ptr) return TRUE;  /* freeing a NULL ptr isn't an error in Win2k */

    heapPtr = HEAP_GetPtr( heap );
    if (!heapPtr)
    {
        RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
        return FALSE;
    }

    flags &= HEAP_NO_SERIALIZE;
    flags |= heapPtr->flags;
    if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );

    /* Inform valgrind we are trying to free memory, so it can throw up an error message */
    notify_free( ptr );

    /* Some sanity checks */
    pInUse = (ARENA_INUSE *)ptr - 1;
    if (!validate_block_pointer( heapPtr, &subheap, pInUse )) goto error;

    if (!subheap)
        free_large_block( heapPtr, flags, ptr );
    else
        HEAP_MakeInUseBlockFree( subheap, pInUse );

    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    TRACE("(%p,%08x,%p): returning TRUE\n", heap, flags, ptr );
    return TRUE;

error:
    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
    TRACE("(%p,%08x,%p): returning FALSE\n", heap, flags, ptr );
    return FALSE;
}
/***********************************************************************
 *           RtlReAllocateHeap   (NTDLL.@)
 *
 * Change the size of a memory block allocated with RtlAllocateHeap().
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  ptr   [I] Block to resize
 *  size  [I] Size of the memory block to allocate
 *
 * RETURNS
 *  Success: A pointer to the resized block (which may be different).
 *  Failure: NULL.
 */
PVOID WINAPI RtlReAllocateHeap( HANDLE heap, ULONG flags, PVOID ptr, SIZE_T size )
{
    ARENA_INUSE *pArena;
    HEAP *heapPtr;
    SUBHEAP *subheap;
    SIZE_T oldBlockSize, oldActualSize, rounded_size;
    void *ret;

    if (!ptr) return NULL;
    if (!(heapPtr = HEAP_GetPtr( heap )))
    {
        RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
        return NULL;
    }

    /* Validate the parameters */

    flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
             HEAP_REALLOC_IN_PLACE_ONLY;
    flags |= heapPtr->flags;
    if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );

    rounded_size = ROUND_SIZE(size) + HEAP_TAIL_EXTRA_SIZE(flags);
    if (rounded_size < size) goto oom;  /* overflow */
    if (rounded_size < HEAP_MIN_DATA_SIZE) rounded_size = HEAP_MIN_DATA_SIZE;

    pArena = (ARENA_INUSE *)ptr - 1;
    if (!validate_block_pointer( heapPtr, &subheap, pArena )) goto error;
    if (!subheap)
    {
        if (!(ret = realloc_large_block( heapPtr, flags, ptr, size ))) goto oom;
        goto done;
    }

    /* Check if we need to grow the block */

    oldBlockSize = (pArena->size & ARENA_SIZE_MASK);
    oldActualSize = (pArena->size & ARENA_SIZE_MASK) - pArena->unused_bytes;
    if (rounded_size > oldBlockSize)
    {
        char *pNext = (char *)(pArena + 1) + oldBlockSize;

        if (rounded_size >= HEAP_MIN_LARGE_BLOCK_SIZE && (flags & HEAP_GROWABLE))
        {
            if (flags & HEAP_REALLOC_IN_PLACE_ONLY) goto oom;
            if (!(ret = allocate_large_block( heapPtr, flags, size ))) goto oom;
            memcpy( ret, pArena + 1, oldActualSize );
            notify_free( pArena + 1 );
            HEAP_MakeInUseBlockFree( subheap, pArena );
            goto done;
        }
        if ((pNext < (char *)subheap->base + subheap->size) &&
            (*(DWORD *)pNext & ARENA_FLAG_FREE) &&
            (oldBlockSize + (*(DWORD *)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= rounded_size))
        {
            /* The next block is free and large enough */
            ARENA_FREE *pFree = (ARENA_FREE *)pNext;
            list_remove( &pFree->entry );
            pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
            if (!HEAP_Commit( subheap, pArena, rounded_size )) goto oom;
            notify_free( pArena + 1 );
            HEAP_ShrinkBlock( subheap, pArena, rounded_size );
            notify_alloc( pArena + 1, size, FALSE );
            /* FIXME: this is wrong as we may lose old VBits settings */
            mark_block_initialized( pArena + 1, oldActualSize );
        }
        else  /* Do it the hard way */
        {
            ARENA_FREE *pNew;
            ARENA_INUSE *pInUse;
            SUBHEAP *newsubheap;

            if ((flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
                !(pNew = HEAP_FindFreeBlock( heapPtr, rounded_size, &newsubheap )))
                goto oom;

            /* Build the in-use arena */

            list_remove( &pNew->entry );
            pInUse = (ARENA_INUSE *)pNew;
            pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
                           + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
            pInUse->magic = ARENA_INUSE_MAGIC;
            HEAP_ShrinkBlock( newsubheap, pInUse, rounded_size );

            mark_block_initialized( pInUse + 1, oldActualSize );
            notify_alloc( pInUse + 1, size, FALSE );
            memcpy( pInUse + 1, pArena + 1, oldActualSize );

            /* Free the previous block */

            notify_free( pArena + 1 );
            HEAP_MakeInUseBlockFree( subheap, pArena );
            subheap = newsubheap;
            pArena  = pInUse;
        }
    }
    else
    {
        /* Shrink the block */
        notify_free( pArena + 1 );
        HEAP_ShrinkBlock( subheap, pArena, rounded_size );
        notify_alloc( pArena + 1, size, FALSE );
        /* FIXME: this is wrong as we may lose old VBits settings */
        mark_block_initialized( pArena + 1, size );
    }

    pArena->unused_bytes = (pArena->size & ARENA_SIZE_MASK) - size;

    /* Clear the extra bytes if needed */

    if (size > oldActualSize)
        initialize_block( (char *)(pArena + 1) + oldActualSize, size - oldActualSize,
                          pArena->unused_bytes, flags );
    else
        mark_block_tail( (char *)(pArena + 1) + size, pArena->unused_bytes, flags );

    /* Return the new arena */

    ret = pArena + 1;
done:
    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    TRACE("(%p,%08x,%p,%08lx): returning %p\n", heap, flags, ptr, size, ret );
    return ret;

oom:
    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
    RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_NO_MEMORY );
    TRACE("(%p,%08x,%p,%08lx): returning NULL\n", heap, flags, ptr, size );
    return NULL;

error:
    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
    TRACE("(%p,%08x,%p,%08lx): returning NULL\n", heap, flags, ptr, size );
    return NULL;
}
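
/* Illustrative usage sketch (not part of this file; p and old_size are placeholders for
 * the caller's existing block and request): as with C realloc(), the resized block may
 * live at a different address, so the old pointer is kept until the call succeeds.
 *
 *   void *bigger = RtlReAllocateHeap( heap, HEAP_ZERO_MEMORY, p, 2 * old_size );
 *   if (bigger) p = bigger;         // on failure p is still valid and unchanged
 */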
/***********************************************************************
 *           RtlCompactHeap   (NTDLL.@)
 *
 * Compact the free space in a Heap.
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *
 * RETURNS
 *  The number of bytes compacted.
 *
 * NOTES
 *  This function is a harmless stub.
 */
ULONG WINAPI RtlCompactHeap( HANDLE heap, ULONG flags )
{
    static BOOL reported;
    if (!reported++) FIXME( "(%p, 0x%x) stub\n", heap, flags );
    return 0;
}
/***********************************************************************
 *           RtlLockHeap   (NTDLL.@)
 *
 * Lock a Heap.
 *
 * PARAMS
 *  heap  [I] Heap to lock
 *
 * RETURNS
 *  Success: TRUE. The Heap is locked.
 *  Failure: FALSE, if heap is invalid.
 */
BOOLEAN WINAPI RtlLockHeap( HANDLE heap )
{
    HEAP *heapPtr = HEAP_GetPtr( heap );
    if (!heapPtr) return FALSE;
    RtlEnterCriticalSection( &heapPtr->critSection );
    return TRUE;
}
/***********************************************************************
 *           RtlUnlockHeap   (NTDLL.@)
 *
 * Unlock a Heap.
 *
 * PARAMS
 *  heap  [I] Heap to unlock
 *
 * RETURNS
 *  Success: TRUE. The Heap is unlocked.
 *  Failure: FALSE, if heap is invalid.
 */
BOOLEAN WINAPI RtlUnlockHeap( HANDLE heap )
{
    HEAP *heapPtr = HEAP_GetPtr( heap );
    if (!heapPtr) return FALSE;
    RtlLeaveCriticalSection( &heapPtr->critSection );
    return TRUE;
}
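
/* Illustrative usage sketch (not part of this file): the lock is the heap's critical
 * section, so it nests and must be released by the thread that acquired it; holding it
 * makes a sequence of heap calls atomic with respect to other threads.
 *
 *   if (RtlLockHeap( heap ))
 *   {
 *       // ... several RtlAllocateHeap / RtlFreeHeap calls ...
 *       RtlUnlockHeap( heap );
 *   }
 */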
/***********************************************************************
 *           RtlSizeHeap   (NTDLL.@)
 *
 * Get the actual size of a memory block allocated from a Heap.
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  ptr   [I] Block to get the size of
 *
 * RETURNS
 *  Success: The size of the block.
 *  Failure: -1, heap or ptr are invalid.
 *
 * NOTES
 *  The size may be bigger than what was passed to RtlAllocateHeap().
 */
SIZE_T WINAPI RtlSizeHeap( HANDLE heap, ULONG flags, const void *ptr )
{
    SIZE_T ret;
    const ARENA_INUSE *pArena;
    SUBHEAP *subheap;
    HEAP *heapPtr = HEAP_GetPtr( heap );

    if (!heapPtr)
    {
        RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
        return ~0UL;
    }
    flags &= HEAP_NO_SERIALIZE;
    flags |= heapPtr->flags;
    if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );

    pArena = (const ARENA_INUSE *)ptr - 1;
    if (!validate_block_pointer( heapPtr, &subheap, pArena ))
    {
        RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
        ret = ~0UL;
    }
    else if (!subheap)
    {
        const ARENA_LARGE *large_arena = (const ARENA_LARGE *)ptr - 1;
        ret = large_arena->data_size;
    }
    else
    {
        ret = (pArena->size & ARENA_SIZE_MASK) - pArena->unused_bytes;
    }
    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );

    TRACE("(%p,%08x,%p): returning %08lx\n", heap, flags, ptr, ret );
    return ret;
}
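
/* Illustrative usage sketch (not part of this file; heap and p are placeholders): per
 * the note above, the reported size may exceed the size originally requested from
 * RtlAllocateHeap().
 *
 *   void *p = RtlAllocateHeap( heap, 0, 10 );
 *   SIZE_T s = RtlSizeHeap( heap, 0, p );    // s >= 10 on success, ~0UL on failure
 */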
/***********************************************************************
 *           RtlValidateHeap   (NTDLL.@)
 *
 * Determine if a block is a valid allocation from a heap.
 *
 * PARAMS
 *  heap  [I] Heap that block was allocated from
 *  flags [I] HEAP_ flags from "winnt.h"
 *  ptr   [I] Block to check
 *
 * RETURNS
 *  Success: TRUE. The block was allocated from heap.
 *  Failure: FALSE, if heap is invalid or ptr was not allocated from it.
 */
BOOLEAN WINAPI RtlValidateHeap( HANDLE heap, ULONG flags, LPCVOID ptr )
{
    HEAP *heapPtr = HEAP_GetPtr( heap );
    if (!heapPtr) return FALSE;
    return HEAP_IsRealArena( heapPtr, flags, ptr, QUIET );
}
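
/* Illustrative usage sketch (not part of this file; heap and p are placeholders):
 * passing NULL as ptr asks HEAP_IsRealArena() to check the whole heap rather than a
 * single block (an assumption based on HEAP_IsRealArena()'s handling of a NULL block).
 *
 *   if (!RtlValidateHeap( heap, 0, NULL )) ERR( "heap %p is corrupt\n", heap );
 *   if (!RtlValidateHeap( heap, 0, p ))    ERR( "%p not allocated from %p\n", p, heap );
 */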
/***********************************************************************
 *           RtlWalkHeap    (NTDLL.@)
 *
 * FIXME
 *  The PROCESS_HEAP_ENTRY flag values seem different between this
 *  function and HeapWalk(). To be checked.
 */
NTSTATUS WINAPI RtlWalkHeap( HANDLE heap, PVOID entry_ptr )
{
    LPPROCESS_HEAP_ENTRY entry = entry_ptr; /* FIXME */
    HEAP *heapPtr = HEAP_GetPtr(heap);
    SUBHEAP *sub, *currentheap = NULL;
    NTSTATUS ret;
    char *ptr;
    int region_index = 0;

    if (!heapPtr || !entry) return STATUS_INVALID_PARAMETER;

    if (!(heapPtr->flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );

    /* FIXME: enumerate large blocks too */

    /* set ptr to the next arena to be examined */

    if (!entry->lpData) /* first call (init) ? */
    {
        TRACE("begin walking of heap %p.\n", heap);
        currentheap = &heapPtr->subheap;
        ptr = (char*)currentheap->base + currentheap->headerSize;
    }
    else
    {
        ptr = entry->lpData;
        LIST_FOR_EACH_ENTRY( sub, &heapPtr->subheap_list, SUBHEAP, entry )
        {
            if ((ptr >= (char *)sub->base) &&
                (ptr < (char *)sub->base + sub->size))
            {
                currentheap = sub;
                break;
            }
            region_index++;
        }
        if (currentheap == NULL)
        {
            ERR("no matching subheap found, shouldn't happen !\n");
            ret = STATUS_NO_MORE_ENTRIES;
            goto HW_end;
        }

        if (((ARENA_INUSE *)ptr - 1)->magic == ARENA_INUSE_MAGIC ||
            ((ARENA_INUSE *)ptr - 1)->magic == ARENA_PENDING_MAGIC)
        {
            ARENA_INUSE *pArena = (ARENA_INUSE *)ptr - 1;
            ptr += pArena->size & ARENA_SIZE_MASK;
        }
        else if (((ARENA_FREE *)ptr - 1)->magic == ARENA_FREE_MAGIC)
        {
            ARENA_FREE *pArena = (ARENA_FREE *)ptr - 1;
            ptr += pArena->size & ARENA_SIZE_MASK;
        }
        else
            ptr += entry->cbData; /* point to next arena */

        if (ptr > (char *)currentheap->base + currentheap->size - 1)
        {   /* proceed with next subheap */
            struct list *next = list_next( &heapPtr->subheap_list, &currentheap->entry );
            if (!next)
            {  /* successfully finished */
                TRACE("end reached.\n");
                ret = STATUS_NO_MORE_ENTRIES;
                goto HW_end;
            }
            currentheap = LIST_ENTRY( next, SUBHEAP, entry );
            ptr = (char *)currentheap->base + currentheap->headerSize;
        }
    }

    if (*(DWORD *)ptr & ARENA_FLAG_FREE)
    {
        ARENA_FREE *pArena = (ARENA_FREE *)ptr;

        /*TRACE("free, magic: %04x\n", pArena->magic);*/

        entry->lpData = pArena + 1;
        entry->cbData = pArena->size & ARENA_SIZE_MASK;
        entry->cbOverhead = sizeof(ARENA_FREE);
        entry->wFlags = PROCESS_HEAP_UNCOMMITTED_RANGE;
    }
    else
    {
        ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;

        /*TRACE("busy, magic: %04x\n", pArena->magic);*/

        entry->lpData = pArena + 1;
        entry->cbData = pArena->size & ARENA_SIZE_MASK;
        entry->cbOverhead = sizeof(ARENA_INUSE);
        entry->wFlags = (pArena->magic == ARENA_PENDING_MAGIC) ?
                        PROCESS_HEAP_UNCOMMITTED_RANGE : PROCESS_HEAP_ENTRY_BUSY;
        /* FIXME: can't handle PROCESS_HEAP_ENTRY_MOVEABLE
           and PROCESS_HEAP_ENTRY_DDESHARE yet */
    }

    entry->iRegionIndex = region_index;

    /* first element of heap ? */
    if (ptr == (char *)currentheap->base + currentheap->headerSize)
    {
        entry->wFlags |= PROCESS_HEAP_REGION;
        entry->u.Region.dwCommittedSize = currentheap->commitSize;
        entry->u.Region.dwUnCommittedSize =
            currentheap->size - currentheap->commitSize;
        entry->u.Region.lpFirstBlock = /* first valid block */
            (char *)currentheap->base + currentheap->headerSize;
        entry->u.Region.lpLastBlock = /* first invalid block */
            (char *)currentheap->base + currentheap->size;
    }
    ret = STATUS_SUCCESS;
    if (TRACE_ON(heap)) HEAP_DumpEntry(entry);

HW_end:
    if (!(heapPtr->flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
    return ret;
}
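
/* Illustrative usage sketch (not part of this file; assumes the caller zero-initializes
 * the entry, since lpData == NULL selects the first arena): enumeration continues while
 * the call returns STATUS_SUCCESS and ends with STATUS_NO_MORE_ENTRIES.
 *
 *   PROCESS_HEAP_ENTRY e;
 *   memset( &e, 0, sizeof(e) );
 *   while (RtlWalkHeap( heap, &e ) == STATUS_SUCCESS)
 *       TRACE( "block %p, %08x bytes, flags %04x\n", e.lpData, e.cbData, e.wFlags );
 */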
/***********************************************************************
 *           RtlGetProcessHeaps    (NTDLL.@)
 *
 * Get the Heaps belonging to the current process.
 *
 * PARAMS
 *  count [I] size of heaps
 *  heaps [O] Destination array for heap HANDLE's
 *
 * RETURNS
 *  Success: The number of Heaps allocated by the process.
 *  Failure: 0.
 */
ULONG WINAPI RtlGetProcessHeaps( ULONG count, HANDLE *heaps )
{
    ULONG total = 1;  /* main heap */
    struct list *ptr;

    RtlEnterCriticalSection( &processHeap->critSection );
    LIST_FOR_EACH( ptr, &processHeap->entry ) total++;
    if (total <= count)
    {
        *heaps++ = processHeap;
        LIST_FOR_EACH( ptr, &processHeap->entry )
            *heaps++ = LIST_ENTRY( ptr, HEAP, entry );
    }
    RtlLeaveCriticalSection( &processHeap->critSection );
    return total;
}
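
/* Illustrative usage sketch (not part of this file): the total count is always returned,
 * but the array is filled only when it is large enough, so callers compare the result
 * against the capacity they passed in.
 *
 *   HANDLE list[64];
 *   ULONG n = RtlGetProcessHeaps( sizeof(list)/sizeof(list[0]), list );
 *   if (n <= sizeof(list)/sizeof(list[0]))
 *       ;  // list[0..n-1] are valid heap handles
 */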
/***********************************************************************
 *           RtlQueryHeapInformation    (NTDLL.@)
 */
NTSTATUS WINAPI RtlQueryHeapInformation( HANDLE heap, HEAP_INFORMATION_CLASS info_class,
                                         PVOID info, SIZE_T size_in, PSIZE_T size_out )
{
    switch (info_class)
    {
    case HeapCompatibilityInformation:
        if (size_out) *size_out = sizeof(ULONG);

        if (size_in < sizeof(ULONG))
            return STATUS_BUFFER_TOO_SMALL;

        *(ULONG *)info = 0; /* standard heap */
        return STATUS_SUCCESS;

    default:
        FIXME("Unknown heap information class %u\n", info_class);
        return STATUS_INVALID_INFO_CLASS;
    }
}
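
/* Illustrative usage sketch (not part of this file): querying HeapCompatibilityInformation
 * on this implementation always reports 0, i.e. a standard (non-low-fragmentation) heap.
 *
 *   ULONG kind;
 *   SIZE_T len;
 *   if (!RtlQueryHeapInformation( heap, HeapCompatibilityInformation,
 *                                 &kind, sizeof(kind), &len ))
 *       TRACE( "heap %p compatibility %u\n", heap, kind );
 */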