src/conv-core/memory-isomalloc.c
/******************************************************************************

A migratable memory allocator.

NOTE: isomalloc is threadsafe, so the isomallocs are not wrapped in CmiMemLock.

*****************************************************************************/
#define CMK_ISOMALLOC_EXCLUDE_FORTRAN_CALLS 0
#if ! CMK_MEMORY_BUILD_OS
/* Use Gnumalloc as meta-meta malloc fallbacks (mm_*) */
#include "memory-gnu.c"
#endif
#include "memory-isomalloc.h"
/* The current allocation arena */
CpvStaticDeclare(CmiIsomallocBlockList *,isomalloc_blocklist);
CpvStaticDeclare(CmiIsomallocBlockList *,pushed_blocklist);
#define ISOMALLOC_PUSH \
  CmiIsomallocBlockList *pushed_blocklist=CpvAccess(isomalloc_blocklist);\
  CpvAccess(isomalloc_blocklist)=NULL;

#define ISOMALLOC_POP \
  CpvAccess(isomalloc_blocklist)=pushed_blocklist;
/* Temporarily disable/enable isomalloc.  Note: the following two functions
 * must be used as a pair, and the thread must not suspend between them.
 */
void CmiDisableIsomalloc()
{
  CpvAccess(pushed_blocklist)=CpvAccess(isomalloc_blocklist);
  CpvAccess(isomalloc_blocklist)=NULL;
}

void CmiEnableIsomalloc()
{
  CpvAccess(isomalloc_blocklist)=CpvAccess(pushed_blocklist);
}
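/* Illustrative only: a minimal, compiled-out sketch of the intended pairing,
 * assuming a caller that needs a scratch buffer placed outside the migratable
 * arena (the helper name is hypothetical).  With isomalloc disabled,
 * meta_malloc() below falls through to mm_malloc(). */
#if 0
static void *grab_nonmigratable_scratch(size_t n)
{
  void *p;
  CmiDisableIsomalloc();   /* allocations now bypass the active blocklist */
  p = malloc(n);
  CmiEnableIsomalloc();    /* re-enable before the thread can suspend */
  return p;
}
#endif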
#if CMK_HAS_TLS_VARIABLES
/**
 * Make sure isomalloc is only called from pthreads spawned by Charm++.
 * It is not safe to call isomalloc from system-spawned pthreads, for example
 * MPICH pthreads or AIO pthreads.
 * Use the following TLS variable to distinguish those pthreads:
 * when set to 1, the current pthread is allowed to call isomalloc.
 */
static CMK_THREADLOCAL int isomalloc_thread = 0;
#else
#if BIGSIM_OUT_OF_CORE && BIGSIM_OOC_PREFETCH
#error TLS support is required for bigsim out-of-core prefetch optimization
#endif
#endif
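/* Illustrative only: a compiled-out sketch of why the flag matters, assuming
 * a worker thread created with pthread_create() outside of Charm++ (the
 * entry-point name is hypothetical).  In such a thread isomalloc_thread keeps
 * its initial value of 0, so meta_malloc() below serves the request from
 * mm_malloc() instead of the migratable arena. */
#if 0
static void *external_library_worker(void *arg)
{
  void *scratch = malloc(4096);  /* falls through to mm_malloc() */
  free(scratch);
  return arg;
}
#endif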
static int meta_inited = 0;
extern int _sync_iso;
extern int _sync_iso_warned;
static void meta_init(char **argv)
{
  if (CmiMyRank()==0) CmiMemoryIs_flag|=CMI_MEMORY_IS_ISOMALLOC;
  CpvInitialize(CmiIsomallocBlockList *,isomalloc_blocklist);
  CpvInitialize(CmiIsomallocBlockList *,pushed_blocklist);
  CpvAccess(isomalloc_blocklist) = NULL;
  CpvAccess(pushed_blocklist) = NULL;
#if CMK_HAS_TLS_VARIABLES
  isomalloc_thread = 1;  /* isomalloc is allowed in this pthread */
#endif
  if (CmiMyRank()==0) meta_inited = 1;
#if CMK_SMP
  if (CmiMyPe()==0 && _sync_iso == 0 && _sync_iso_warned == 0) {
    _sync_iso_warned = 1;
    printf("Warning> Using Isomalloc in SMP mode, you may need to run with '+isomalloc_sync'.\n");
  }
#endif
}
static void *meta_malloc(size_t size)
{
  void *ret=NULL;
#if CMK_HAS_TLS_VARIABLES
  int _isomalloc_thread = isomalloc_thread;
  if (CmiThreadIs(CMI_THREAD_IS_TLS)) _isomalloc_thread = 1;
#endif
  if (meta_inited && CpvInitialized(isomalloc_blocklist) && CpvAccess(isomalloc_blocklist)
#if CMK_HAS_TLS_VARIABLES
      && _isomalloc_thread
#endif
     )
  { /*Isomalloc a new block and link it in*/
    ISOMALLOC_PUSH /*Disable isomalloc while inside isomalloc*/
#if CMK_ISOMALLOC_EXCLUDE_FORTRAN_CALLS
    if (CmiIsFortranLibraryCall()==1) {
      ret=mm_malloc(size);
    }
    else
#endif
    ret=CmiIsomallocBlockListMalloc(pushed_blocklist,size);
    ISOMALLOC_POP
  }
  else /*Just use regular malloc*/
    ret=mm_malloc(size);
  return ret;
}
static void meta_free(void *mem)
{
  if (mem != NULL && CmiIsomallocInRange(mem))
  { /*Unlink this slot and isofree*/
    ISOMALLOC_PUSH
    CmiIsomallocBlockListFree(mem);
    ISOMALLOC_POP
  }
  else /*Just use regular free*/
    mm_free(mem);
}
static void *meta_calloc(size_t nelem, size_t size)
{
  void *ret=meta_malloc(nelem*size);
  if (ret != NULL) memset(ret,0,nelem*size);
  return ret;
}

static void meta_cfree(void *mem)
{
  meta_free(mem);
}
static void *meta_realloc(void *oldBuffer, size_t newSize)
{
  void *newBuffer;
  /*Not one of ours: hand it straight to the regular realloc*/
  if (!CmiIsomallocInRange(oldBuffer))
    return mm_realloc(oldBuffer,newSize);

  newBuffer = meta_malloc(newSize);
  if ( newBuffer && oldBuffer ) {
    /*Must preserve the old buffer's contents, so we need its size.
      SILLY HACK: the user pointer is preceded by a CmiIsomallocBlockList
      header, so peek at the blocklist internals to recover the length.*/
    size_t size=CmiIsomallocLength(((CmiIsomallocBlockList *)oldBuffer)-1)-
                sizeof(CmiIsomallocBlockList);
    if (size>newSize) size=newSize;
    if (size > 0)
      memcpy(newBuffer, oldBuffer, size);
  }
  if (oldBuffer)
    meta_free(oldBuffer);
  return newBuffer;
}
static void *meta_memalign(size_t align, size_t size)
{
  void *ret=NULL;
  if (CpvInitialized(isomalloc_blocklist) && CpvAccess(isomalloc_blocklist))
  { /*Isomalloc a new block and link it in*/
    ISOMALLOC_PUSH /*Disable isomalloc while inside isomalloc*/
#if CMK_ISOMALLOC_EXCLUDE_FORTRAN_CALLS
    if (CmiIsFortranLibraryCall()==1) {
      ret=mm_memalign(align, size);
    }
    else
#endif
    ret=CmiIsomallocBlockListMallocAlign(pushed_blocklist,align,size);
    ISOMALLOC_POP
  }
  else /*Just use regular memalign*/
    ret=mm_memalign(align, size);
  return ret;
}
static int meta_posix_memalign(void **outptr, size_t align, size_t size)
{
  int ret = 0;
  if (CpvInitialized(isomalloc_blocklist) && CpvAccess(isomalloc_blocklist))
  { /*Isomalloc a new block and link it in*/
    ISOMALLOC_PUSH /*Disable isomalloc while inside isomalloc*/
#if CMK_ISOMALLOC_EXCLUDE_FORTRAN_CALLS
    if (CmiIsFortranLibraryCall()==1) {
      ret=mm_posix_memalign(outptr, align, size);
    }
    else
#endif
    *outptr = CmiIsomallocBlockListMallocAlign(pushed_blocklist,align,size);
    ISOMALLOC_POP
  }
  else /*Just use regular posix_memalign*/
    ret=mm_posix_memalign(outptr, align, size);
  return ret;
}
static void *meta_aligned_alloc(size_t align, size_t size)
{
  void *ret=NULL;
  if (CpvInitialized(isomalloc_blocklist) && CpvAccess(isomalloc_blocklist))
  { /*Isomalloc a new block and link it in*/
    ISOMALLOC_PUSH /*Disable isomalloc while inside isomalloc*/
#if CMK_ISOMALLOC_EXCLUDE_FORTRAN_CALLS
    if (CmiIsFortranLibraryCall()==1) {
      ret=mm_aligned_alloc(align, size);
    }
    else
#endif
    ret=CmiIsomallocBlockListMallocAlign(pushed_blocklist,align,size);
    ISOMALLOC_POP
  }
  else /*Just use regular aligned_alloc*/
    ret=mm_aligned_alloc(align, size);
  return ret;
}
static void *meta_valloc(size_t size)
{
  return meta_memalign(CmiGetPageSize(), size);
}

static void *meta_pvalloc(size_t size)
{
  const size_t pagesize = CmiGetPageSize();
  return meta_memalign(pagesize, (size + pagesize - 1) & ~(pagesize - 1)); /* round up to a page multiple */
}
#define CMK_MEMORY_HAS_NOMIGRATE
/*Allocate non-migratable memory:*/
void *malloc_nomigrate(size_t size)
{
  void *result;
  CmiMemLock();
  result = mm_malloc(size);
  CmiMemUnlock();
  return result;
}

void free_nomigrate(void *mem)
{
  CmiMemLock();
  mm_free(mem);
  CmiMemUnlock();
}
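/* Illustrative only: a compiled-out sketch of using the non-migratable
 * allocator, assuming a buffer that must keep a fixed address even if the
 * calling thread later migrates (e.g. memory handed to the OS or a
 * communication library).  The helper name is hypothetical. */
#if 0
static double *alloc_pinned_workspace(size_t n)
{
  /* Served by mm_malloc() under CmiMemLock; release with free_nomigrate(). */
  return (double *)malloc_nomigrate(n * sizeof(double));
}
#endif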
#define CMK_MEMORY_HAS_ISOMALLOC

/* Make this blockList "active" -- the recipient of incoming
   mallocs.  Returns the old blocklist. */
CmiIsomallocBlockList *CmiIsomallocBlockListActivate(CmiIsomallocBlockList *l)
{
  CmiIsomallocBlockList **s=&CpvAccess(isomalloc_blocklist);
  CmiIsomallocBlockList *ret=*s;
  *s=l;
  return ret;
}

CmiIsomallocBlockList *CmiIsomallocBlockListCurrent(void){
  return CpvAccess(isomalloc_blocklist);
}
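/* Illustrative only: a compiled-out sketch of switching arenas around a group
 * of allocations, assuming the caller already holds a block list `myList`
 * obtained elsewhere (the variable and function names are hypothetical). */
#if 0
static void fill_migratable_state(CmiIsomallocBlockList *myList)
{
  CmiIsomallocBlockList *old = CmiIsomallocBlockListActivate(myList);
  void *state = malloc(1024);  /* served from myList via meta_malloc() */
  (void)state;
  CmiIsomallocBlockListActivate(old);  /* restore the previous arena */
}
#endif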