/*
 * malloc.c
 *
 * Very simple linked-list based malloc()/free().
 */

#include <string.h>
#include <errno.h>
#include <dprintf.h>
#include <minmax.h>

#include "malloc.h"
#include "thread.h"

DECLARE_INIT_SEMAPHORE(__malloc_semaphore, 1);
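/*
 * Layout sketch (inferred from the field accesses below; the real
 * definitions live in malloc.h): every block starts with a struct
 * arena_header carrying packed attrs (type/heap/size), a tag, and
 * next/prev links for the all-block chain.  Free blocks extend this
 * to struct free_arena_header, which adds next_free/prev_free links
 * for the per-heap circular free list rooted at __core_malloc_head[].
 */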
static void *__malloc_from_block(struct free_arena_header *fp,
                                 size_t size, malloc_tag_t tag)
{
    size_t fsize;
    struct free_arena_header *nfp, *na;
    unsigned int heap = ARENA_HEAP_GET(fp->a.attrs);

    fsize = ARENA_SIZE_GET(fp->a.attrs);
    /* We need the 2* to account for the larger requirements of a free block */
    if ( fsize >= size + 2*sizeof(struct arena_header) ) {
        /* Bigger block than required -- split block */
        nfp = (struct free_arena_header *)((char *)fp + size);
        na = fp->a.next;
        ARENA_TYPE_SET(nfp->a.attrs, ARENA_TYPE_FREE);
        ARENA_HEAP_SET(nfp->a.attrs, heap);
        ARENA_SIZE_SET(nfp->a.attrs, fsize - size);
        nfp->a.tag = MALLOC_FREE;
#ifdef DEBUG_MALLOC
        nfp->a.magic = ARENA_MAGIC;
#endif
        ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
        ARENA_SIZE_SET(fp->a.attrs, size);
        fp->a.tag = tag;

        /* Insert into all-block chain */
        nfp->a.prev = fp;
        nfp->a.next = na;
        na->a.prev = nfp;
        fp->a.next = nfp;
        /* Replace current block on free chain */
        nfp->next_free = fp->next_free;
        nfp->prev_free = fp->prev_free;
        fp->next_free->prev_free = nfp;
        fp->prev_free->next_free = nfp;
    } else {
        /* Allocate the whole block */
        ARENA_TYPE_SET(fp->a.attrs, ARENA_TYPE_USED);
        fp->a.tag = tag;

        /* Remove from free chain */
        fp->next_free->prev_free = fp->prev_free;
        fp->prev_free->next_free = fp->next_free;
    }
    /* The user pointer starts just past the arena header */
    return (void *)(&fp->a + 1);
}
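/*
 * Worked example of the split above, assuming 16-byte arena headers:
 * carving a 48-byte allocation out of a 128-byte free block leaves an
 * 80-byte remainder, which is at least 2*sizeof(struct arena_header)
 * and therefore becomes a new free block; a 112-byte request would
 * instead consume the whole 128-byte block.
 */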
static void *_malloc(size_t size, enum heap heap, malloc_tag_t tag)
{
    struct free_arena_header *fp;
    struct free_arena_header *head = &__core_malloc_head[heap];
    void *p = NULL;
75 dprintf("_malloc(%zu, %u, %u) @ %p = ",
76 size
, heap
, tag
, __builtin_return_address(0));
78 sem_down(&__malloc_semaphore
, 0);
    if (size) {
        /* Add the obligatory arena header, and round up */
        size = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;
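        /*
         * Worked example, assuming sizeof(struct arena_header) == 16
         * and ARENA_SIZE_MASK == ~15: a 10-byte request becomes
         * (10 + 32 - 1) & ~15 = 32 bytes, i.e. one header plus a
         * payload rounded up to the arena granularity.  The actual
         * header size and mask come from malloc.h.
         */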
        for ( fp = head->next_free ; fp != head ; fp = fp->next_free ) {
            if ( ARENA_SIZE_GET(fp->a.attrs) >= size ) {
                /* Found fit -- allocate out of this block */
                p = __malloc_from_block(fp, size, tag);
                break;
            }
        }
    }
93 sem_up(&__malloc_semaphore
);
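/*
 * Note: the scan above is plain first-fit over the per-heap free list,
 * and the semaphore serializes the whole allocation path, so the list
 * manipulations are safe against concurrent core threads.
 */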
__export void *malloc(size_t size)
{
    return _malloc(size, HEAP_MAIN, MALLOC_CORE);
}
__export void *lmalloc(size_t size)
{
    void *p;

    p = _malloc(size, HEAP_LOWMEM, MALLOC_CORE);
    if (!p)
        errno = ENOMEM;
    return p;
}
void *pmapi_lmalloc(size_t size)
{
    return _malloc(size, HEAP_LOWMEM, MALLOC_MODULE);
}
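/*
 * Usage sketch (illustrative only): callers pick the heap via the
 * wrapper, e.g.
 *
 *     char *buf = lmalloc(512);   // low-memory heap; sets errno on failure
 *     if (buf) { ... use buf ...; free(buf); }
 *
 * free() is assumed to live elsewhere in this module, alongside the
 * arena definitions in malloc.h.
 */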
__export void *realloc(void *ptr, size_t size)
{
    struct free_arena_header *ah, *nah;
    struct free_arena_header *head;
    void *newptr;
    size_t newsize, oldsize, xsize;

    if (!ptr)
        return malloc(size);

    if (size == 0) {
        free(ptr);
        return NULL;
    }
    ah = (struct free_arena_header *)
        ((struct arena_header *)ptr - 1);

    head = &__core_malloc_head[ARENA_HEAP_GET(ah->a.attrs)];
#ifdef DEBUG_MALLOC
    if (ah->a.magic != ARENA_MAGIC)
        dprintf("failed realloc() magic check: %p\n", ptr);
#endif
    /* Actual size of the old block */
    //oldsize = ah->a.size;
    oldsize = ARENA_SIZE_GET(ah->a.attrs);

    /* Add the obligatory arena header, and round up */
    newsize = (size + 2 * sizeof(struct arena_header) - 1) & ARENA_SIZE_MASK;
    if (oldsize >= newsize && newsize >= (oldsize >> 2) &&
        oldsize - newsize < 4096) {
        /* This allocation is close enough already: we are not growing,
           shrinking would recover less than 4 KiB, and the new size is
           still at least a quarter of the old block. */
        return ptr;
    } else {
        xsize = oldsize;

        nah = ah->a.next;
        if ((char *)nah == (char *)ah + ARENA_SIZE_GET(ah->a.attrs) &&
            ARENA_TYPE_GET(nah->a.attrs) == ARENA_TYPE_FREE &&
            ARENA_SIZE_GET(nah->a.attrs) + oldsize >= newsize) {
            //nah->a.type == ARENA_TYPE_FREE &&
            //oldsize + nah->a.size >= newsize) {
            /* Merge in subsequent free block */
            ah->a.next = nah->a.next;
            ah->a.next->a.prev = ah;

            nah->next_free->prev_free = nah->prev_free;
            nah->prev_free->next_free = nah->next_free;

            ARENA_SIZE_SET(ah->a.attrs, ARENA_SIZE_GET(ah->a.attrs) +
                           ARENA_SIZE_GET(nah->a.attrs));
            xsize = ARENA_SIZE_GET(ah->a.attrs);
        }
        if (xsize >= newsize) {
            /* We can reallocate in place */
            if (xsize >= newsize + 2 * sizeof(struct arena_header)) {
                /* Residual free block at end */
                nah = (struct free_arena_header *)((char *)ah + newsize);
                ARENA_TYPE_SET(nah->a.attrs, ARENA_TYPE_FREE);
                ARENA_SIZE_SET(nah->a.attrs, xsize - newsize);
                ARENA_SIZE_SET(ah->a.attrs, newsize);
                ARENA_HEAP_SET(nah->a.attrs, ARENA_HEAP_GET(ah->a.attrs));
#ifdef DEBUG_MALLOC
                nah->a.magic = ARENA_MAGIC;
#endif

                //nah->a.type = ARENA_TYPE_FREE;
                //nah->a.size = xsize - newsize;
                //ah->a.size = newsize;
                /* Insert into block list */
                nah->a.next = ah->a.next;
                ah->a.next = nah;
                nah->a.next->a.prev = nah;
                nah->a.prev = ah;
                /* Insert into free list */
                if (newsize > oldsize) {
                    /* Hack: this free block is in the path of a memory
                       object which has already been grown at least once.
                       As such, put it at the *end* of the freelist instead
                       of the beginning; trying to save it for future
                       realloc()s of the same block. */
                    nah->prev_free = head->prev_free;
                    nah->next_free = head;
                    head->prev_free = nah;
                    nah->prev_free->next_free = nah;
                } else {
                    nah->next_free = head->next_free;
                    nah->prev_free = head;
                    head->next_free = nah;
                    nah->next_free->prev_free = nah;
                }
            }
            /* otherwise, use up the whole block */
            return ptr;
        } else {
            /* Last resort: need to allocate a new block and copy */
            oldsize -= sizeof(struct arena_header);
            newptr = malloc(size);
            if (newptr) {
                memcpy(newptr, ptr, min(size, oldsize));
                free(ptr);
            }
            return newptr;
        }
    }
}
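/*
 * realloc() strategy, summarized: (1) keep the block if the rounded
 * sizes are close enough; (2) otherwise try to grow in place by merging
 * the physically adjacent next block when it is free and large enough,
 * splitting off any residue as a new free block; (3) failing that,
 * malloc() a new block, copy min(size, old payload), and free the old one.
 */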
__export void *zalloc(size_t size)
{
    void *ptr;

    ptr = malloc(size);
    if (ptr)
        memset(ptr, 0, size);

    return ptr;
}