2 +----------------------------------------------------------------------+
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #ifndef incl_HPHP_UTIL_ALLOC_H_
18 #define incl_HPHP_UTIL_ALLOC_H_
24 #include "folly/Portability.h"
26 #include "hphp/util/exception.h"
28 #ifdef FOLLY_SANITIZE_ADDRESS
29 // ASan is less precise than valgrind so we'll need a superset of those tweaks
31 // TODO: (t2869817) ASan doesn't play well with jemalloc
38 #include <google/malloc_extension.h>
44 # include "malloc_np.h"
50 # undef ALLOCM_NO_MOVE
51 # include <jemalloc/jemalloc.h>
53 # define ALLOCM_ARENA(a) 0
55 # if JEMALLOC_VERSION_MAJOR > 3 || \
56 (JEMALLOC_VERSION_MAJOR == 3 && JEMALLOC_VERSION_MINOR >= 5)
57 # define USE_JEMALLOC_MALLOCX
61 #include "hphp/util/maphuge.h"
65 #define MallocExtensionInstance _ZN15MallocExtension8instanceEv
66 MallocExtension
* MallocExtensionInstance() __attribute__((weak
));
// Weak declarations of jemalloc's control/introspection API (see
// <jemalloc/jemalloc.h>).  Declared weak so this header's users link even
// when jemalloc is not the active allocator; the symbols are null in that
// case and must be checked before use.
int mallctl(const char* name, void* oldp, size_t* oldlenp, void* newp,
            size_t newlen) __attribute__((weak));
int mallctlnametomib(const char* name, size_t* mibp, size_t* miblenp)
    __attribute__((weak));
int mallctlbymib(const size_t* mibp, size_t miblen, void* oldp,
                 size_t* oldlenp, void* newp, size_t newlen)
    __attribute__((weak));
void malloc_stats_print(void (*write_cb)(void*, const char*),
                        void* cbopaque, const char* opts)
    __attribute__((weak));
84 ///////////////////////////////////////////////////////////////////////////////
/**
 * True iff this build uses jemalloc as its malloc implementation.
 * NOTE(review): the initializer lines are missing from this chunk;
 * reconstructed from the USE_JEMALLOC guards used throughout this
 * header — confirm against upstream.
 */
const bool use_jemalloc =
#ifdef USE_JEMALLOC
  true;
#else
  false;
#endif
94 class OutOfMemoryException
: public Exception
{
96 explicit OutOfMemoryException(size_t size
)
97 : Exception("Unable to allocate %zu bytes of memory", size
) {}
98 virtual ~OutOfMemoryException() throw() {}
99 EXCEPTION_COMMON_IMPL(OutOfMemoryException
);
102 ///////////////////////////////////////////////////////////////////////////////
// Index of the jemalloc arena used by low_malloc()/low_free()
// (passed to MALLOCX_ARENA/ALLOCM_ARENA below).
extern unsigned low_arena;
// Huge-page request count for the low arena; written by
// low_malloc_huge_pages().
extern std::atomic<int> low_huge_pages;
/**
 * Allocate `size` bytes from the low-memory arena.  With jemalloc the
 * request is forwarded to low_malloc_impl() (defined elsewhere); without
 * jemalloc it degrades to plain malloc().
 * NOTE(review): the non-jemalloc branch and closing brace are missing
 * from this chunk; reconstructed — confirm against upstream.
 */
inline void* low_malloc(size_t size) {
#ifndef USE_JEMALLOC
  return malloc(size);
#else
  extern void* low_malloc_impl(size_t size);
  return low_malloc_impl(size);
#endif
}
/**
 * Release memory obtained from low_malloc().
 * NOTE(review): the leading #if/#else lines are missing from this chunk;
 * reconstructed so the two visible deallocation calls live in mutually
 * exclusive branches (the visible #elif proves an opening #if existed) —
 * confirm against upstream.
 */
inline void low_free(void* ptr) {
#ifndef USE_JEMALLOC
  free(ptr);
#elif defined(USE_JEMALLOC_MALLOCX)
  // jemalloc >= 3.5: the *allocx API (see USE_JEMALLOC_MALLOCX above).
  dallocx(ptr, MALLOCX_ARENA(low_arena));
#else
  // Older jemalloc: the experimental *allocm API.
  dallocm(ptr, ALLOCM_ARENA(low_arena));
#endif
}
/**
 * Request huge-page backing for the low arena: stores `pages` into the
 * low_huge_pages atomic.  No-op when not built with jemalloc.
 * NOTE(review): the #ifdef guard and closing brace are missing from this
 * chunk; reconstructed — confirm against upstream.
 */
inline void low_malloc_huge_pages(int pages) {
#ifdef USE_JEMALLOC
  low_huge_pages = pages;
#endif
}
/**
 * Exclude the address range [start, end) from huge-page treatment by the
 * low allocator.
 * NOTE(review): behavior inferred from the name — confirm against the
 * definition.
 */
void low_malloc_skip_huge(void* start, void* end);
137 * Safe memory allocation.
139 inline void* safe_malloc(size_t size
) {
140 void* p
= malloc(size
);
141 if (!p
) throw OutOfMemoryException(size
);
145 inline void* safe_calloc(size_t count
, size_t size
) {
146 void* p
= calloc(count
, size
);
147 if (!p
) throw OutOfMemoryException(size
);
151 inline void* safe_realloc(void* ptr
, size_t size
) {
152 ptr
= realloc(ptr
, size
);
153 if (!ptr
&& size
> 0) throw OutOfMemoryException(size
);
/**
 * free() counterpart of the safe_* allocators; like free(), accepts null.
 * NOTE(review): the body is missing from this chunk; reconstructed as a
 * plain free() — confirm against upstream.
 */
inline void safe_free(void* ptr) {
  free(ptr);
}
/**
 * Instruct the low-level memory allocator to free memory back to the
 * system.  Called when a thread has been idle and is predicted to
 * continue to be idle for a while.
 */
void flush_thread_caches();

/**
 * Instruct the kernel to free parts of the unused stack back to the
 * system.  Like flush_thread_caches, this is called when the thread has
 * been idle and is predicted to continue to be idle for a while.
 */
void flush_thread_stack();
/**
 * Like scoped_ptr, but calls free() on destruct.
 * NOTE(review): the class header, access specifiers, operator= body, and
 * member declaration are missing from this chunk; reconstructed (assign
 * asserts the slot is empty rather than freeing a prior pointer) —
 * confirm against upstream.
 */
class ScopedMem {
  ScopedMem(const ScopedMem&); // disable copying
  ScopedMem& operator=(const ScopedMem&);
 public:
  ScopedMem() : m_ptr(0) {}
  explicit ScopedMem(void* ptr) : m_ptr(ptr) {}
  ~ScopedMem() { free(m_ptr); }
  // Take ownership of `ptr`; may only be used on an empty ScopedMem.
  ScopedMem& operator=(void* ptr) {
    assert(m_ptr == nullptr);
    m_ptr = ptr;
    return *this;
  }
 private:
  void* m_ptr;
};
// Per-thread stack bookkeeping; see init_stack_limits().
extern __thread uintptr_t s_stackLimit;
extern __thread size_t s_stackSize;
// Populate the thread-local stack bounds from the given pthread
// attributes.
void init_stack_limits(pthread_attr_t* attr);

// The system page size (defined elsewhere).
extern const size_t s_pageSize;
/**
 * The numa node this thread is bound to.
 */
extern __thread int32_t s_numaNode;

/**
 * Enable the numa support in hhvm, and determine whether threads should
 * default to using node-local memory.
 * NOTE(review): the tail of this comment is missing from this chunk.
 */
void enable_numa(bool local);

/**
 * Determine the node that the next thread should run on.
 */
int next_numa_node();

/**
 * Set the thread affinity, and the jemalloc arena for the current
 * thread.  Also initializes s_numaNode.
 */
void set_numa_binding(int node);

/**
 * The number of numa nodes in the system.
 */
int num_numa_nodes();

/**
 * Enable numa interleaving for the specified address range.
 */
void numa_interleave(void* start, size_t size);

/**
 * Allocate the specified address range on the local node.
 */
void numa_local(void* start, size_t size);

/**
 * Allocate the specified address range on the given node.
 */
void numa_bind_to(void* start, size_t size, int node);

/**
 * jemalloc pprof utility functions.
 */
int jemalloc_pprof_enable();
int jemalloc_pprof_disable();
int jemalloc_pprof_dump(const std::string& prefix, bool force);
245 #endif // USE_JEMALLOC
247 ///////////////////////////////////////////////////////////////////////////////
250 #endif // incl_HPHP_UTIL_ALLOC_H_