// hphp/runtime/base/req-containers.h
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
   +----------------------------------------------------------------------+
*/
16 #ifndef incl_HPHP_RUNTIME_BASE_REQ_CONTAINERS_H_
17 #define incl_HPHP_RUNTIME_BASE_REQ_CONTAINERS_H_
19 #include <cstdlib>
20 #include <deque>
21 #include <forward_list>
22 #include <functional>
23 #include <limits>
24 #include <list>
25 #include <map>
26 #include <memory>
27 #include <queue>
28 #include <set>
29 #include <stack>
30 #include <unordered_map>
31 #include <unordered_set>
32 #include <utility>
33 #include <vector>
35 #include <boost/container/flat_map.hpp>
37 #include <folly/Memory.h>
38 #include <folly/Optional.h>
40 #include "hphp/runtime/base/req-malloc.h"
42 #include "hphp/util/fixed-vector.h"
43 #include "hphp/util/tiny-vector.h"
44 #include "hphp/util/type-scan.h"
45 #include "hphp/util/hash-map-typedefs.h"
47 namespace HPHP { namespace req {
49 //////////////////////////////////////////////////////////////////////
/*
 * Defines a family of types similar to std:: collections and pointers, except
 * using the request-local allocator and with GC type-scanner annotations.
 *
 * Replace std:: with req:: if you know the data is request-local.
 *
 * Most of these replacements purposefully derive from standard types and
 * containers, which is normally not recommended. This is needed so we can add
 * GC type-scanner annotations. This is safe because we don't add any members,
 * nor change any functionality.
 *
 * Shorthands to create std::unique_ptr or std::shared_ptr to a
 * heap-allocated object. Memory will be request-scoped; the pointer
 * wrapper remembers how to properly delete the object with req::Allocator.
 *
 * Usage:
 *   auto ptr = req::make_unique<Foo>(...);
 *   auto ptr = req::make_shared<Foo>(...);
 */
/*
 * req::unique_ptr: std::unique_ptr whose object lives on the request heap
 * and is destroyed through req::Allocator.
 */
template <typename T>
struct unique_ptr final : folly::AllocatorUniquePtr<T, Allocator<T>>::type {
  using Base = typename folly::AllocatorUniquePtr<T,Allocator<T>>::type;
  // Forward every constructor of the underlying std::unique_ptr type.
  template <class... Args>
  /* implicit */ unique_ptr(Args&&... args) :
    Base(std::forward<Args>(args)...) {}
  // Unlike the rest, we don't want to ignore the base. Its easy to type-scan
  // unique_ptr and more efficient to let it do it itself.
  TYPE_SCAN_SILENCE_FORBIDDEN_BASES(Base);
};
/*
 * Allocate a T on the request heap and wrap it in a req::unique_ptr.
 */
template<typename T, class... Args>
unique_ptr<T> make_unique(Args&&... args) {
  return folly::allocate_unique<T>(
    Allocator<T>(),
    std::forward<Args>(args)...
  );
}
94 template <typename T> struct weak_ptr;
97 * req::shared_ptr does not derive from std::shared_ptr.
98 * 1) They have different scopes (req::shared_ptr is per-request)
99 * 2) Guaranteeing req::shared_ptr's can only be instantiated via make_shared
100 * makes type scanning easier.
103 template <typename T>
104 struct shared_ptr final {
105 shared_ptr() = default;
106 explicit shared_ptr(std::nullptr_t) :
107 m_std_ptr(nullptr) {
109 template<class Y>
110 shared_ptr( const shared_ptr<Y>& r )
111 : m_std_ptr(r.m_std_ptr),
112 m_scan_size(r.m_scan_size),
113 m_scan_index(r.m_scan_index) {
115 template <class Y>
116 shared_ptr(shared_ptr<Y>&& r)
117 : m_std_ptr(std::move(r.m_std_ptr)),
118 m_scan_size(std::move(r.m_scan_size)),
119 m_scan_index(std::move(r.m_scan_index)) {
121 template<class Y>
122 explicit shared_ptr(const weak_ptr<Y>& r)
123 : m_std_ptr(r),
124 m_scan_size(r.m_scan_size),
125 m_scan_index(r.m_scan_index) {
128 TYPE_SCAN_CUSTOM(T) {
129 if (this->get() != nullptr) {
130 scanner.scanByIndex(m_scan_index, this->get(), m_scan_size);
134 T* get() const {
135 return m_std_ptr.get();
137 T& operator*() const {
138 return *get();
140 T* operator->() const {
141 return get();
143 explicit operator bool() const {
144 return get() != nullptr;
146 template <class U>
147 bool operator==(const shared_ptr<U>& rhs) {
148 return m_std_ptr == rhs.m_std_ptr;
150 template<class Y>
151 shared_ptr& operator=(const shared_ptr<Y>& r) {
152 m_std_ptr = r.m_std_ptr;
153 m_scan_size = r.m_scan_size;
154 m_scan_index = r.m_scan_index;
155 return *this;
157 template<class Y>
158 shared_ptr& operator=(shared_ptr<Y>&& r) {
159 m_std_ptr = std::move(r.m_std_ptr);
160 m_scan_size = std::move(r.m_scan_size);
161 m_scan_index = std::move(r.m_scan_index);
162 return *this;
164 void reset() {
165 m_std_ptr.reset();
166 m_scan_size = 0;
167 m_scan_index = type_scan::kIndexUnknownNoPtrs;
169 void swap(shared_ptr& r) {
170 m_std_ptr.swap(r.m_std_ptr);
171 std::swap(m_scan_index, r.m_scan_index);
172 std::swap(m_scan_size, r.m_scan_size);
174 void swap(T& t) {
175 m_std_ptr.swap(t);
176 m_scan_size = sizeof(t);
177 m_scan_index = type_scan::getIndexForScan<T>();
180 template<class U, class... Args>
181 friend shared_ptr<U> make_shared(Args&&... args);
182 friend struct weak_ptr<T>;
184 private:
185 std::shared_ptr<T> m_std_ptr;
186 std::size_t m_scan_size = 0;
187 type_scan::Index m_scan_index = type_scan::kIndexUnknownNoPtrs;
188 template<class Y>
189 shared_ptr(const std::shared_ptr<Y>& r, std::size_t size,
190 type_scan::Index index)
191 : m_std_ptr(r), m_scan_size(size), m_scan_index(index) {
195 template<class T, class... Args>
196 shared_ptr<T> make_shared(Args&&... args) {
197 ConservativeAllocator<T> alloc;
198 std::shared_ptr<T> r = std::allocate_shared<T>(alloc,
199 std::forward<Args>(args)...);
200 return shared_ptr<T>(r, sizeof(*r), type_scan::getIndexForScan<T>());
203 template <class T>
204 void swap(shared_ptr<T>& lhs, shared_ptr<T>& rhs) {
205 lhs.swap(rhs);
208 template <typename T>
209 struct weak_ptr final : private std::weak_ptr<T> {
210 using Base = std::weak_ptr<T>;
211 template <class Y>
212 explicit weak_ptr (const shared_ptr<Y>& r) :
213 Base(r.m_std_ptr),
214 m_scan_size(r.m_scan_size),
215 m_scan_index(r.m_scan_index) {
218 shared_ptr<T> lock() const {
219 std::shared_ptr<T> r = Base::lock();
220 return shared_ptr<T>(r, sizeof(*r), type_scan::getIndexForScan<T>());
222 TYPE_SCAN_IGNORE_BASES(Base);
223 TYPE_SCAN_IGNORE_ALL;
225 friend struct shared_ptr<T>;
226 private:
227 std::size_t m_scan_size = 0;
228 type_scan::Index m_scan_index = type_scan::kIndexUnknownNoPtrs;
231 #ifndef __APPLE__ // XXX: this affects codegen quality but not correctness
232 static_assert(
233 sizeof(unique_ptr<int>) == sizeof(std::unique_ptr<int>),
234 "req::unique_ptr pointer should not be larger than std::unique_ptr"
236 #endif
/*
 * Container replacements.
 *
 * For all the containers, instruct the type-scanning machinery to ignore the
 * base class (the actual container). This is because the container internals
 * typically use something like std::aligned_storage<> for the actual value, so
 * the scanner will not find it on its own. Instead, ignore the base, and
 * provide a custom scanner which uses the public interface to get at the
 * values. The actual container nodes are allocated with a conservative scan
 * action, so if the container lies on the stack or anywhere else we're
 * conservative scanning, we'll conservative scan that as well.
 */
251 template <typename Key,
252 typename T,
253 typename Compare = std::less<Key>>
254 struct map final : std::map<Key, T, Compare,
255 ConservativeAllocator<std::pair<const Key,T>>
257 using Base = std::map<Key, T, Compare,
258 ConservativeAllocator<std::pair<const Key, T>>>;
259 using Base::Base;
260 TYPE_SCAN_IGNORE_BASES(Base);
261 TYPE_SCAN_CUSTOM(Key, T) {
262 for (const auto& pair : *this) scanner.scan(pair);
266 template <typename Key,
267 typename T,
268 typename Compare = std::less<Key>>
269 struct multimap final : std::multimap<Key, T, Compare,
270 ConservativeAllocator<
271 std::pair<const Key,T>>> {
272 using Base = std::multimap<Key, T, Compare,
273 ConservativeAllocator<std::pair<const Key, T>>>;
274 using Base::Base;
275 TYPE_SCAN_IGNORE_BASES(Base);
276 TYPE_SCAN_CUSTOM(Key, T) {
277 for (const auto& pair : *this) scanner.scan(pair);
281 template <typename T, typename Compare = std::less<T>>
282 struct set final : std::set<T, Compare, ConservativeAllocator<T>> {
283 using Base = std::set<T, Compare, ConservativeAllocator<T>>;
284 using Base::Base;
285 TYPE_SCAN_IGNORE_BASES(Base);
286 TYPE_SCAN_CUSTOM(T) {
287 for (const auto& v : *this) scanner.scan(v);
291 template <typename T, typename Compare = std::less<T>>
292 struct multiset final : std::multiset<T, Compare, ConservativeAllocator<T>> {
293 using Base = std::multiset<T, Compare, ConservativeAllocator<T>>;
294 using Base::Base;
295 TYPE_SCAN_IGNORE_BASES(Base);
296 TYPE_SCAN_CUSTOM(T) {
297 for (const auto& v : *this) scanner.scan(v);
301 template <typename T>
302 struct deque final : std::deque<T, ConservativeAllocator<T>> {
303 using Base = std::deque<T, ConservativeAllocator<T>>;
304 using Base::Base;
305 TYPE_SCAN_IGNORE_BASES(Base);
306 TYPE_SCAN_CUSTOM(T) {
307 for (const auto& v : *this) scanner.scan(v);
311 template <typename T>
312 struct vector final : std::vector<T, ConservativeAllocator<T>> {
313 using Base = std::vector<T, ConservativeAllocator<T>>;
314 using Base::Base;
315 TYPE_SCAN_IGNORE_BASES(Base);
316 TYPE_SCAN_CUSTOM(T) {
317 for (const auto& v : *this) scanner.scan(v);
321 template <typename T>
322 struct list final : std::list<T, ConservativeAllocator<T>> {
323 using Base = std::list<T, ConservativeAllocator<T>>;
324 using Base::Base;
325 TYPE_SCAN_IGNORE_BASES(Base);
326 TYPE_SCAN_CUSTOM(T) {
327 for (const auto& v : *this) scanner.scan(v);
331 template <typename T>
332 struct forward_list final : std::forward_list<T, ConservativeAllocator<T>> {
333 using Base = std::forward_list<T, ConservativeAllocator<T>>;
334 using Base::Base;
335 TYPE_SCAN_IGNORE_BASES(Base);
336 TYPE_SCAN_CUSTOM(T) {
337 for (const auto& v : *this) scanner.scan(v);
341 template <typename T, typename Container = req::deque<T>>
342 using stack = std::stack<T, Container>;
344 template <typename T, typename Container = req::deque<T>>
345 using queue = std::queue<T, Container>;
347 template <typename T,
348 typename Container = req::vector<T>,
349 typename Compare = std::less<T>>
350 using priority_queue = std::priority_queue<T, Container, Compare>;
// req:: drop-in for boost::container::flat_map (sorted-vector map).
template<typename K, typename V, typename Pred = std::less<K>>
struct flat_map final : boost::container::flat_map<
  K, V, Pred, ConservativeAllocator<std::pair<K,V>>
> {
  using Base =
    boost::container::flat_map<K, V, Pred,
                               ConservativeAllocator<std::pair<K,V>>>;
  using Base::Base;
  TYPE_SCAN_IGNORE_BASES(Base);
  TYPE_SCAN_CUSTOM(K, V) {
    for (const auto& pair : *this) scanner.scan(pair);
  }
};
// req:: drop-in for boost::container::flat_multimap.
template<typename K, typename V, typename Pred = std::less<K>>
struct flat_multimap final : boost::container::flat_multimap<
  K, V, Pred, ConservativeAllocator<std::pair<K,V>>
> {
  using Base = boost::container::flat_multimap<
    K, V, Pred, ConservativeAllocator<std::pair<K,V>>
  >;
  using Base::Base;
  TYPE_SCAN_IGNORE_BASES(Base);
  TYPE_SCAN_CUSTOM(K, V) {
    for (const auto& pair : *this) scanner.scan(pair);
  }
};
// req:: drop-in for boost::container::flat_set (sorted-vector set).
template<typename K, typename Compare = std::less<K>>
struct flat_set final : boost::container::flat_set<K, Compare,
                                                   ConservativeAllocator<K>> {
  using Base = boost::container::flat_set<K, Compare, ConservativeAllocator<K>>;
  using Base::Base;
  TYPE_SCAN_IGNORE_BASES(Base);
  TYPE_SCAN_CUSTOM(K) {
    for (const auto& v : *this) scanner.scan(v);
  }
};
// req:: drop-in for boost::container::flat_multiset.
template<typename K, typename Compare = std::less<K>>
struct flat_multiset final : boost::container::flat_multiset<
  K, Compare, ConservativeAllocator<K>
> {
  using Base =
    boost::container::flat_multiset<K, Compare, ConservativeAllocator<K>>;
  using Base::Base;
  TYPE_SCAN_IGNORE_BASES(Base);
  TYPE_SCAN_CUSTOM(K) {
    for (const auto& v : *this) scanner.scan(v);
  }
};
404 template <class T,
405 class U,
406 class V = std::hash<T>,
407 class W = std::equal_to<T>>
408 struct hash_map final : std::unordered_map<
409 T, U, V, W,
410 ConservativeAllocator<std::pair<const T,U>>
412 hash_map()
413 : std::unordered_map<T, U, V, W,
414 ConservativeAllocator<std::pair<const T,U>>>
415 GOOD_UNORDERED_CTOR
418 using Base = std::unordered_map<
419 T, U, V, W, ConservativeAllocator<std::pair<const T, U>>
422 TYPE_SCAN_IGNORE_BASES(Base);
423 TYPE_SCAN_CUSTOM(T, U) {
424 for (const auto& pair : *this) scanner.scan(pair);
428 template <class T,
429 class U,
430 class V = std::hash<T>,
431 class W = std::equal_to<T>>
432 struct hash_multimap final : std::unordered_multimap<
433 T, U, V, W,
434 ConservativeAllocator<std::pair<const T,U>>
436 hash_multimap()
437 : std::unordered_multimap<T, U, V, W,
438 ConservativeAllocator<std::pair<const T,U>>>
439 GOOD_UNORDERED_CTOR
442 using Base = std::unordered_multimap<
443 T, U, V, W, ConservativeAllocator<std::pair<const T, U>>
446 TYPE_SCAN_IGNORE_BASES(Base);
447 TYPE_SCAN_CUSTOM(T, U) {
448 for (const auto& pair : *this) scanner.scan(pair);
452 template <class T,
453 class V = std::hash<T>,
454 class W = std::equal_to<T>>
455 struct hash_set final : std::unordered_set<T,V,W,ConservativeAllocator<T> > {
456 hash_set()
457 : std::unordered_set<T,V,W,ConservativeAllocator<T>>
458 GOOD_UNORDERED_CTOR
461 using Base = std::unordered_set<T,V,W,ConservativeAllocator<T>>;
463 TYPE_SCAN_IGNORE_BASES(Base);
464 TYPE_SCAN_CUSTOM(T) {
465 for (const auto& v : *this) scanner.scan(v);
469 template <typename T>
470 struct FixedVector final : HPHP::FixedVector<T, ConservativeAllocator<T>> {
471 using Base = HPHP::FixedVector<T, ConservativeAllocator<T>>;
472 using Base::Base;
473 TYPE_SCAN_IGNORE_BASES(Base);
474 TYPE_SCAN_CUSTOM(T) {
475 for (const auto& v : *this) scanner.scan(v);
479 // Special allocator for TinyVector using request heap allocation.
480 template <typename T, typename Element = T> struct TinyVectorReqAllocator {
481 template <typename U> struct rebind {
482 using type = TinyVectorReqAllocator<U, Element>;
485 void* allocate(std::size_t size) const {
486 return req::malloc(
487 size,
488 type_scan::getIndexForMalloc<
489 T, type_scan::Action::Conservative<Element>
493 void deallocate(void* ptr) const { req::free(ptr); }
494 std::size_t usable_size(void* /*ptr*/, std::size_t size) const {
495 return size;
499 template<typename T,
500 std::size_t Internal = 1,
501 std::size_t MinHeap = 0>
502 struct TinyVector final : HPHP::TinyVector<T,
503 Internal,
504 MinHeap,
505 TinyVectorReqAllocator<T>> {
506 using Base = HPHP::TinyVector<T,Internal,MinHeap,TinyVectorReqAllocator<T>>;
507 using Base::Base;
508 TYPE_SCAN_IGNORE_BASES(Base);
509 TYPE_SCAN_CUSTOM(T) {
510 for (const auto& v : *this) scanner.scan(v);
514 //////////////////////////////////////////////////////////////////////
/*
 * Like folly::Optional, but exactly scans T.
 */
template<typename T>
struct Optional: folly::Optional<T> {
  using Base = folly::Optional<T>;
  TYPE_SCAN_IGNORE_BASES(Base);
  TYPE_SCAN_CUSTOM(T) {
    // Only scan the contained value when one is actually present.
    if (this->hasValue()) scanner.scan(*this->get_pointer());
  }
};

//////////////////////////////////////////////////////////////////////

}}

#endif