Optimize struct element initialization
[hiphop-php.git] / hphp / runtime / vm / memo-cache.cpp
blobe92a0f11f631aaf508319bd8d2669efd35153c13
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "memo-cache.h"
19 #include "hphp/runtime/base/string-data.h"
20 #include "hphp/runtime/base/tv-refcount.h"
22 #include <bitset>
23 #include <type_traits>
24 #include <folly/container/F14Map.h>
26 namespace HPHP {
28 namespace memoCacheDetail {
// Deliberately weak hash combiner: it merely folds two values together as
// cheaply as possible (a single LEA on x86-64). We rely on F14's bit mixer
// downstream to do the real scrambling of the combined bits.
inline size_t combineHashes(uint64_t a, uint64_t b) {
  return a * 9 + b;
}
38 ////////////////////////////////////////////////////////////
41 * Keys in memo caches are composed of two pieces, the "header" and the
42 * "storage". The header is what stores the meta-data for the key, which might
43 * include the length of the key or the associated FuncId. In some cases, the
44 * header might be empty, and it only serves as a policy class. The storage
45 * stores the actual key values (which are represented as KeyElem), and might be
46 * fixed-size, or a variable size data-structure. KeyElem doesn't know if it
47 * stores a string or integer, so storage is responsible for tracking this
48 * information as well. Everything is templated to allow for one implementation
49 * of all the operations with keys which is efficient for each representation.
52 // Represents a single element of a key (corresponding to a parameter).
53 struct KeyElem {
54 union {
55 int64_t i;
56 StringData* s;
59 // These shouldn't be copied because we don't perform ref-count manipulations.
60 KeyElem() = default;
61 KeyElem(KeyElem&&) = delete;
62 KeyElem(const KeyElem&) = delete;
63 KeyElem& operator=(const KeyElem&) = delete;
64 KeyElem& operator=(KeyElem&&) = delete;
66 // Basic operations. KeyElem doesn't know whether its an integer or string, so
67 // these operations must be told that.
68 bool equals(const TypedValue& key, bool isString) const {
69 if (!isString) return isIntType(key.m_type) && i == key.m_data.num;
70 if (!isStringType(key.m_type)) return false;
71 auto const s2 = key.m_data.pstr;
72 return (s == s2) || s->same(s2);
75 bool equals(const KeyElem& other, bool isString) const {
76 if (!isString) return i == other.i;
77 return (s->hash() == other.s->hash()) && s->same(other.s);
80 size_t hash(bool isString) const {
81 return isString ? s->hash() : i;
85 ////////////////////////////////////////////////////////////
88 * Headers. These represent the different ways we have of representing metadata
89 * for a key.
91 * The operations they must support are:
92 * size() - Return the number of keys
93 * equals() - Check for equality against another header of the
94 * same type
95 * startHash(1 param) - Combine the hash for this header with the other
96 * hash value (used when computing a key's hash).
97 * startHash() - Obtain the hash for this header (used as the initial
98 * value when computing a key's hash)
99 * moved() - This key is being moved away. Set the key count to 0
100 * if the count isn't constant.
// Non-shared, fixed size case. The header is empty and is just a policy
// class; the number of keys is the compile-time constant N.
template <int N> struct EmptyHeader {
  size_t size() const { return N; }
  // All EmptyHeaders of the same N are interchangeable.
  bool equals(EmptyHeader) const { return true; }
  // The header contributes nothing to the hash.
  size_t startHash(size_t firstHash) const {
    return firstHash;
  }
  // A fixed-size key is always non-empty, so the zero-element hash entry
  // point must never be reached.
  size_t startHash() const { always_assert(false); }
  // Key count is constant, so moving away needs no bookkeeping.
  void moved() {}
};
// Shared, fixed size case. The header just stores a FuncId (to distinguish
// different functions sharing the cache), but the number of keys is still the
// compile-time constant N.
template <int N> struct FuncIdHeader {
  explicit FuncIdHeader(FuncId funcId) : funcId{funcId} {}
  size_t size() const { return N; }
  bool equals(const FuncIdHeader& other) const {
    return funcId == other.funcId;
  }
  // Fold the FuncId into the key's hash.
  size_t startHash(size_t firstHash) const {
    return combineHashes(funcId.toInt(), firstHash);
  }
  // Always non-empty, so this must never be called.
  size_t startHash() const { always_assert(false); }
  void moved() {}
  FuncId funcId;
};
// Generic case. Both the function and key count are stored (and are
// non-constant), packed into a single GenericMemoId.
struct GenericHeader {
  explicit GenericHeader(GenericMemoId::Param id) : id{id} {}
  size_t size() const { return id.getKeyCount(); }
  bool equals(const GenericHeader& other) const {
    return id.asParam() == other.id.asParam();
  }
  size_t startHash(size_t firstHash) const {
    return combineHashes(id.asParam(), firstHash);
  }
  // Zero-element keys hash to the header's identity alone.
  size_t startHash() const { return id.asParam(); }
  // Moving a key away zeroes the count so the moved-from key tears down
  // cleanly without touching (already transferred) elements.
  void moved() { id.setKeyCount(0); }
  GenericMemoId id;
};
147 ////////////////////////////////////////////////////////////
// Fixed-size storage specialization. N is the number of keys, and H is the
// header to use. We derive from H to make use of the empty-base class
// optimization.
template <int N, typename H> struct FixedStorage : private H {
  static_assert(N > 0, "FixedStorage cannot be empty");
  using Header = H;

  explicit FixedStorage(Header header) : Header{header}
  { assertx(size() <= N); }

  FixedStorage(FixedStorage&& o) noexcept
    : Header{std::move(o)}
    , stringTags{std::move(o.stringTags)}
  {
    // Mark the source as emptied (no-op for these constant-size headers) and
    // clear its tags so it no longer claims ownership of any strings.
    o.moved();
    o.stringTags.reset();
    // Raw bit-copy of the union payloads; no ref-count changes are needed
    // because the source relinquishes its references.
    for (size_t i = 0; i < size(); ++i) {
      elems[i].i = o.elems[i].i;
    }
  }

  FixedStorage(const FixedStorage&) = delete;
  FixedStorage& operator=(const FixedStorage&) = delete;
  FixedStorage& operator=(FixedStorage&&) = delete;

  size_t size() const { return this->Header::size(); }
  bool isString(size_t i) const { assertx(i < size()); return stringTags[i]; }
  void initIsString(size_t i) { assertx(i < size()); stringTags[i] = true; }
  // The bitset default-constructs to all-false, so ints need no marking.
  void initIsInt(size_t) {}

  KeyElem& elem(size_t i) { assertx(i < size()); return elems[i]; }
  const KeyElem& elem(size_t i) const { assertx(i < size()); return elems[i]; }

  Header& header() { return *this; }
  const Header& header() const { return *this; }

  // If HasStringTags is true, we can do certain operations faster (comparing
  // all element types in one bitset operation).
  static constexpr bool HasStringTags = true;
  bool compareStringTags(const FixedStorage& o) const {
    return stringTags == o.stringTags;
  }

  // Widen a smaller bitset to this storage's width so the tag sets can be
  // compared/assigned directly.
  template <long unsigned int M>
  static std::bitset<N> castStringTags(const std::bitset<M>& o) {
    static_assert(M <= N, "");
    static_assert(N <= std::numeric_limits<unsigned long long>::digits, "");
    return std::bitset<N>{o.to_ullong()};
  }

  template <long unsigned int M>
  bool compareStringTags(const std::bitset<M>& o) const {
    return stringTags == castStringTags(o);
  }
  template <long unsigned int M>
  void setStringTags(const std::bitset<M>& o) {
    stringTags = castStringTags(o);
  }

  // The key elements are a fixed size, and we use a bitset to know which ones
  // are strings.
  std::array<KeyElem, N> elems;
  std::bitset<N> stringTags;
};
// Storage specialization for a non-fixed number of keys. Used for generic
// memo caches.
struct UnboundStorage {
  // We always need a GenericHeader, so there's no need to parameterize it.
  using Header = GenericHeader;

  explicit UnboundStorage(Header header)
    : header_{header}
    , data{
        (header_.size() > 0)
          ? req::make_raw_array<Pair>(header_.size())
          : nullptr
      }
  {}

  UnboundStorage(UnboundStorage&& o) noexcept
    : header_{std::move(o.header_)}
    , data{o.data}
  {
    // Zero the source's key count and steal its array so its destructor
    // becomes a no-op.
    o.header_.moved();
    o.data = nullptr;
  }

  UnboundStorage(const UnboundStorage&) = delete;
  UnboundStorage& operator=(const UnboundStorage&) = delete;
  UnboundStorage& operator=(UnboundStorage&&) = delete;

  ~UnboundStorage() {
    if (data) req::destroy_raw_array(data, header_.size());
  }

  size_t size() const { return header_.size(); }
  bool isString(size_t i) const {
    assertx(i < size());
    return data[i].isString;
  }
  void initIsString(size_t i) { assertx(i < size()); data[i].isString = true; }
  void initIsInt(size_t i) { assertx(i < size()); data[i].isString = false; }

  KeyElem& elem(size_t i) { assertx(i < size()); return data[i].elem; }
  const KeyElem& elem(size_t i) const {
    assertx(i < size());
    return data[i].elem;
  }

  Header& header() { return header_; }
  const Header& header() const { return header_; }

  // We don't store the int/string markers for keys in a compacted bitset, so
  // we can't take advantage of some optimizations.
  static constexpr bool HasStringTags = false;
  template <typename T> bool compareStringTags(const T&) const {
    always_assert(false);
  }
  template <typename T> void setStringTags(const T&) {
    always_assert(false);
  }

  Header header_;
  // Use a dynamically allocated array of KeyElem and bool pairs to represent
  // the key.
  struct Pair {
    KeyElem elem;
    bool isString;
    // GC scanning: only string elements hold heap pointers.
    TYPE_SCAN_CUSTOM() {
      if (isString) scanner.scan(elem.s);
    }
  };
  Pair* data;
};
284 ////////////////////////////////////////////////////////////
286 // The actual key. The Key is instantiated on a particular kind of storage, and
287 // all the generic operations on it are implemented here.
288 template <typename S> struct Key {
289 using Header = typename S::Header;
291 template <typename P>
292 Key(Header header, const P proxy)
293 : storage{header}
294 { proxy.initStorage(storage); }
296 Key(Key&&) = default;
297 Key(const Key&) = delete;
298 Key& operator=(const Key&) = delete;
299 Key& operator=(Key&&) = delete;
301 ~Key() {
302 // Dec-ref the strings
303 for (size_t i = 0; i < storage.size(); ++i) {
304 if (storage.isString(i)) storage.elem(i).s->decRefAndRelease();
308 bool equals(const Key& o) const {
309 // First compare the headers for equality
310 if (!storage.header().equals(o.storage.header())) return false;
311 // If the storage has string tags, we can compare all the types at once.
312 if (S::HasStringTags && !storage.compareStringTags(o.storage)) {
313 return false;
315 for (size_t i = 0; i < storage.size(); ++i) {
316 // If the storage doesn't have string tags, we need to compare the type
317 // one at a time.
318 if (!S::HasStringTags && storage.isString(i) != o.storage.isString(i)) {
319 return false;
321 if (!storage.elem(i).equals(o.storage.elem(i), storage.isString(i))) {
322 return false;
325 return true;
328 template <typename P>
329 bool equals(Header header, P proxy) const {
330 if (UNLIKELY(!storage.header().equals(header))) return false;
331 return proxy.equals(storage);
334 size_t hash() const {
335 // If the key has no elements (which can happen in generic caches), just use
336 // the hash for the header.
337 if (storage.size() <= 0) return storage.header().startHash();
338 // Otherwise, combine the hash for the first element and the header.
339 auto hash = storage.header().startHash(
340 storage.elem(0).hash(storage.isString(0))
342 // And then combine it with the rest of the hashes for the other key
343 // elements.
344 for (size_t i = 1; i < storage.size(); ++i) {
345 hash = combineHashes(
346 hash,
347 storage.elem(i).hash(storage.isString(i))
350 return hash;
353 TYPE_SCAN_CUSTOM() {
354 for (size_t i = 0, n = storage.size(); i < n; ++i) {
355 if (storage.isString(i)) scanner.scan(storage.elem(i).s);
359 S storage;
// Instantiations for the supported possibilities: non-shared fixed-size,
// shared (FuncId-tagged) fixed-size, and fully generic keys.
template <int N> using FixedKey = Key<FixedStorage<N, EmptyHeader<N>>>;
template <int N> using FixedFuncIdKey = Key<FixedStorage<N, FuncIdHeader<N>>>;
using UnboundKey = Key<UnboundStorage>;
367 ////////////////////////////////////////////////////////////
370 * KeyProxy is a wrapper around the pointer to the TypedValue array passed into the
371 * get/set function. It allows us to do lookups in the memo cache without having
372 * to move or transform those Cells. It comes in two flavors: KeyProxy, where
373 * the key types are not known and must be checked at runtme, and
374 * KeyProxyWithTypes, where the key types are known statically.
376 struct KeyProxy {
377 const TypedValue* keys;
379 template <typename H>
380 size_t hash(H header) const {
381 // If there's no key elements (which can happen with generic memo-caches),
382 // just use the hash from the header.
383 if (header.size() <= 0) return header.startHash();
384 auto const getHash = [](const TypedValue& c) {
385 assertx(tvIsPlausible(c));
386 assertx(isIntType(c.m_type) || isStringType(c.m_type));
387 return isIntType(c.m_type) ? c.m_data.num : c.m_data.pstr->hash();
389 // Otherwise, combine the hash from the header and the first element, and
390 // then combine that with the rest of the element hashes.
391 auto hash = header.startHash(getHash(keys[0]));
392 for (size_t i = 1; i < header.size(); ++i) {
393 hash = combineHashes(hash, getHash(keys[i]));
395 return hash;
398 template <typename S>
399 bool equals(const S& storage) const {
400 for (size_t i = 0; i < storage.size(); ++i) {
401 assertx(tvIsPlausible(keys[i]));
402 assertx(isIntType(keys[i].m_type) || isStringType(keys[i].m_type));
403 if (UNLIKELY(!storage.elem(i).equals(keys[i], storage.isString(i)))) {
404 return false;
407 return true;
410 template <typename S>
411 void initStorage(S& storage) const {
412 // Given a storage, initialize it with values from this KeyProxy. Used when
413 // we're storing a Key and need to turn a KeyProxy into a Key.
414 for (size_t i = 0; i < storage.size(); ++i) {
415 assertx(tvIsPlausible(keys[i]));
416 assertx(isIntType(keys[i].m_type) || isStringType(keys[i].m_type));
417 if (isStringType(keys[i].m_type)) {
418 keys[i].m_data.pstr->incRefCount();
419 storage.initIsString(i);
420 } else {
421 storage.initIsInt(i);
423 storage.elem(i).i = keys[i].m_data.num;
// Key types and count are known statically, so we can use compile-time
// recursion to implement most operations. Each instantiation handles element
// 0 of its view and delegates the rest to Next (one fewer bool in the pack).
template <bool IsStr, bool... Rest>
struct KeyProxyWithTypes {
  static constexpr auto Size = sizeof...(Rest)+1;

  using Next = KeyProxyWithTypes<Rest...>;

  const TypedValue* keys;

  template <typename H>
  size_t hash(H header) const {
    assertx(header.size() == Size);
    // Seed with header + element 0, then recurse over the remaining elements.
    return Next{keys}.template hashRec<1>(header.startHash(hashAt<0>()));
  }

  template <typename S>
  bool equals(const S& storage) const {
    assertx(storage.size() == Size);
    // With string tags, one bitset compare rules out any type mismatch.
    if (S::HasStringTags &&
        UNLIKELY(!storage.compareStringTags(makeBitset()))) {
      return false;
    }
    return equalsRec<0>(storage);
  }

  template <typename S>
  void initStorage(S& storage) const {
    assertx(storage.size() == Size);
    if (S::HasStringTags) storage.setStringTags(makeBitset());
    initStorageRec<0>(storage);
  }

  // Fold element N's hash into the accumulator and recurse for N+1.
  template <int N> size_t hashRec(size_t hash) const {
    return Next{keys}.template hashRec<N+1>(combineHashes(hash, hashAt<N>()));
  }

  // Compare element N against the stored key, then recurse for N+1.
  template <int N, typename S>
  bool equalsRec(const S& storage) const {
    assertx(tvIsPlausible(keys[N]));
    assertx(!IsStr || isStringType(keys[N].m_type));
    assertx(IsStr || isIntType(keys[N].m_type));
    if (!S::HasStringTags && UNLIKELY(storage.isString(N) != IsStr)) {
      return false;
    }
    assertx(!S::HasStringTags || storage.isString(N) == IsStr);
    if (IsStr) {
      auto const s = keys[N].m_data.pstr;
      auto const s2 = storage.elem(N).s;
      if (UNLIKELY(s != s2 && !s->same(s2))) return false;
    } else if (UNLIKELY(storage.elem(N).i != keys[N].m_data.num)) {
      return false;
    }
    return Next{keys}.template equalsRec<N+1>(storage);
  }

  // Write element N into the storage (taking a string ref), then recurse.
  template <int N, typename S>
  void initStorageRec(S& storage) const {
    assertx(tvIsPlausible(keys[N]));
    assertx(!IsStr || isStringType(keys[N].m_type));
    assertx(IsStr || isIntType(keys[N].m_type));
    if (IsStr) {
      if (!S::HasStringTags) storage.initIsString(N);
      keys[N].m_data.pstr->incRefCount();
    } else if (!S::HasStringTags) {
      storage.initIsInt(N);
    }
    storage.elem(N).i = keys[N].m_data.num;
    Next{keys}.template initStorageRec<N+1>(storage);
  }

  template <int N> size_t hashAt() const {
    assertx(tvIsPlausible(keys[N]));
    assertx(!IsStr || isStringType(keys[N].m_type));
    assertx(IsStr || isIntType(keys[N].m_type));
    return IsStr ? keys[N].m_data.pstr->hash() : keys[N].m_data.num;
  }

  // Build the compile-time bitset of which elements are strings.
  static constexpr std::bitset<Size> makeBitset() {
    std::bitset<Size> b;
    makeBitsetRec<0, Size>(b);
    return b;
  }
  template <int M, int N>
  static constexpr void makeBitsetRec(std::bitset<N>& b) {
    b[M] = IsStr;
    Next::template makeBitsetRec<M+1, N>(b);
  }
};
// Base case for recursion. KeyProxy with one element; mirrors the general
// case but terminates instead of delegating to Next.
template <bool IsStr> struct KeyProxyWithTypes<IsStr> {
  const TypedValue* keys;

  template <typename H>
  size_t hash(H header) const {
    assertx(header.size() == 1);
    return header.startHash(hashAt<0>());
  }

  template <typename S>
  bool equals(const S& storage) const {
    assertx(storage.size() == 1);
    // With string tags, one bitset compare rules out any type mismatch.
    if (S::HasStringTags &&
        UNLIKELY(!storage.compareStringTags(makeBitset()))) {
      return false;
    }
    return equalsRec<0>(storage);
  }

  template <typename S>
  void initStorage(S& storage) const {
    assertx(storage.size() == 1);
    if (S::HasStringTags) storage.setStringTags(makeBitset());
    initStorageRec<0>(storage);
  }

  // Terminal step of the hash recursion: fold in the final element.
  template <int N> size_t hashRec(size_t hash) const {
    return combineHashes(hash, hashAt<N>());
  }

  // Terminal step of the equality recursion.
  template <int N, typename S>
  bool equalsRec(const S& storage) const {
    assertx(tvIsPlausible(keys[N]));
    assertx(!IsStr || isStringType(keys[N].m_type));
    assertx(IsStr || isIntType(keys[N].m_type));
    if (!S::HasStringTags && UNLIKELY(storage.isString(N) != IsStr)) {
      return false;
    }
    assertx(!S::HasStringTags || storage.isString(N) == IsStr);
    if (IsStr) {
      auto const s = keys[N].m_data.pstr;
      auto const s2 = storage.elem(N).s;
      return s == s2 || s->same(s2);
    }
    return storage.elem(N).i == keys[N].m_data.num;
  }

  // Terminal step of storage initialization (takes a ref on a string).
  template <int N, typename S>
  void initStorageRec(S& storage) const {
    assertx(tvIsPlausible(keys[N]));
    assertx(!IsStr || isStringType(keys[N].m_type));
    assertx(IsStr || isIntType(keys[N].m_type));
    if (IsStr) {
      if (!S::HasStringTags) storage.initIsString(N);
      keys[N].m_data.pstr->incRefCount();
    } else if (!S::HasStringTags) {
      storage.initIsInt(N);
    }
    storage.elem(N).i = keys[N].m_data.num;
  }

  template <int N> size_t hashAt() const {
    assertx(tvIsPlausible(keys[N]));
    assertx(!IsStr || isStringType(keys[N].m_type));
    assertx(IsStr || isIntType(keys[N].m_type));
    return IsStr ? keys[N].m_data.pstr->hash() : keys[N].m_data.num;
  }

  static constexpr std::bitset<1> makeBitset() {
    std::bitset<1> b;
    makeBitsetRec<0, 1>(b);
    return b;
  }
  template <int M, int N>
  static constexpr void makeBitsetRec(std::bitset<N>& b) { b[M] = IsStr; }
};
598 ////////////////////////////////////////////////////////////
// Equality functor for the F14 maps. Marked "transparent" so lookups can use
// a (header, proxy) tuple instead of constructing a full Key.
template <typename K> struct KeyEquals {
  using is_transparent = void;

  template <typename P>
  bool operator()(std::tuple<typename K::Header, P> lookup,
                  const K& stored) const {
    return LIKELY(stored.equals(std::get<0>(lookup), std::get<1>(lookup)));
  }
  bool operator()(const K& a, const K& b) const {
    return a.equals(b);
  }
};
// Hash functor for the F14 maps; transparent for the same mixed-type lookups
// as KeyEquals.
template <typename K> struct KeyHasher {
  using is_transparent = void;

  template <typename P>
  size_t operator()(std::tuple<typename K::Header, P> lookup) const {
    return std::get<1>(lookup).hash(std::get<0>(lookup));
  }
  size_t operator()(const K& key) const { return key.hash(); }
};
// The SharedOnlyKey has already been hashed, so it's an identity here.
struct SharedOnlyKeyHasher {
  size_t operator()(SharedOnlyKey k) const { return k; }
};
631 ////////////////////////////////////////////////////////////
// Wrapper around a TypedValue to handle the ref-count manipulations for us:
// takes a reference on construction, drops it on destruction.
struct TVWrapper {
  explicit TVWrapper(TypedValue value) : value{value}
  { tvIncRefGen(value); }
  // Moving leaves the source holding null so its destructor's decref is a
  // no-op.
  TVWrapper(TVWrapper&& o) noexcept: value{o.value} {
    o.value = make_tv<KindOfNull>();
  }
  TVWrapper(const TVWrapper&) = delete;
  TVWrapper& operator=(const TVWrapper&) = delete;
  // Swap rather than overwrite: the old value is released by o's destructor.
  TVWrapper& operator=(TVWrapper&& o) noexcept {
    std::swap(value, o.value);
    return *this;
  }
  ~TVWrapper() { tvDecRefGen(value); }
  TypedValue value;
};
650 ////////////////////////////////////////////////////////////
namespace folly {

// Mark the SharedOnlyKeyHasher as avalanching so that F14 doesn't use the bit
// mixer on it (the key is already hashed).
template <typename K>
struct IsAvalanchingHasher<HPHP::memoCacheDetail::SharedOnlyKeyHasher, K>
  : std::true_type {};

}
666 namespace HPHP {
668 namespace memoCacheDetail {
670 ////////////////////////////////////////////////////////////
672 // For the specialized and generic caches
673 template <typename K> struct MemoCache : MemoCacheBase {
674 using Cache = folly::F14ValueMap<
676 TVWrapper,
677 KeyHasher<K>,
678 KeyEquals<K>,
679 req::ConservativeAllocator<std::pair<const K, TVWrapper>>
681 Cache cache;
683 MemoCache() = default;
684 MemoCache(const MemoCache&) = delete;
685 MemoCache(MemoCache&&) = delete;
686 MemoCache& operator=(const MemoCache&) = delete;
687 MemoCache& operator=(MemoCache&&) = delete;
689 TYPE_SCAN_CUSTOM() {
690 using value_type = typename Cache::value_type; // pair
691 cache.visitContiguousRanges(
692 [&](value_type const* start, value_type const* end) {
693 scanner.scan(*start, (const char*)end - (const char*)start);
// For the shared-only caches (which do not need any of the key machinery):
// a direct map from the pre-hashed SharedOnlyKey to the cached value.
struct SharedOnlyMemoCache : MemoCacheBase {
  using Cache = folly::F14ValueMap<
    SharedOnlyKey,
    TVWrapper,
    SharedOnlyKeyHasher,
    std::equal_to<SharedOnlyKey>,
    req::ConservativeAllocator<std::pair<const SharedOnlyKey, TVWrapper>>
  >;
  Cache cache;

  SharedOnlyMemoCache() = default;
  SharedOnlyMemoCache(const SharedOnlyMemoCache&) = delete;
  SharedOnlyMemoCache(SharedOnlyMemoCache&&) = delete;
  SharedOnlyMemoCache& operator=(const SharedOnlyMemoCache&) = delete;
  SharedOnlyMemoCache& operator=(SharedOnlyMemoCache&&) = delete;

  // GC scanning: walk the map's contiguous storage ranges and scan them as
  // raw byte spans.
  TYPE_SCAN_CUSTOM() {
    using value_type = typename Cache::value_type; // pair
    cache.visitContiguousRanges(
      [&](value_type const* start, value_type const* end) {
        scanner.scan(*start, (const char*)end - (const char*)start);
      });
  }
};
726 ////////////////////////////////////////////////////////////
728 using namespace memoCacheDetail;
730 namespace {
// Helper functions. Given a pointer to a MemoCacheBase, return a pointer to
// the appropriate F14 cache embedded within it. If we're in debug builds,
// we'll use dynamic-cast for this to catch places where someone might be
// mixing different cache types on the same pointer. Otherwise, we'll just use
// static cast.

template <typename C>
ALWAYS_INLINE
auto const& getCache(const MemoCacheBase* base) {
  if (debug) {
    auto ptr = dynamic_cast<const C*>(base);
    always_assert(ptr != nullptr);
    return ptr->cache;
  }
  return static_cast<const C*>(base)->cache;
}
// Non-const overload of the above: same debug-checked downcast, mutable
// cache reference.
template <typename C>
ALWAYS_INLINE
auto& getCache(MemoCacheBase* base) {
  if (debug) {
    auto ptr = dynamic_cast<C*>(base);
    always_assert(ptr != nullptr);
    return ptr->cache;
  }
  return static_cast<C*>(base)->cache;
}
// The get and set logic for all the different cache types is the same, so
// factor it out into these helper functions.

// Look up the entry matching (header, keys); return a pointer to the cached
// value, or nullptr on a miss.
template <typename K, typename P>
ALWAYS_INLINE
const TypedValue* getImpl(const MemoCacheBase* base,
                          typename K::Header header,
                          P keys) {
  auto const& cache = getCache<MemoCache<K>>(base);
  // Transparent lookup: no Key is materialized for the probe.
  auto const it = cache.find(std::make_tuple(header, keys));
  if (it == cache.end()) return nullptr;
  assertx(tvIsPlausible(it->second.value));
  assertx(it->second.value.m_type != KindOfUninit);
  return &it->second.value;
}
// Insert (or overwrite) the entry for (header, keys), lazily allocating the
// cache on first use. Note that `base` is a reference so the newly created
// cache is published back to the caller's slot.
template <typename K, typename P>
ALWAYS_INLINE
void setImpl(MemoCacheBase*& base,
             typename K::Header header,
             P keys,
             TypedValue val) {
  assertx(tvIsPlausible(val));
  assertx(val.m_type != KindOfUninit);
  if (!base) base = req::make_raw<MemoCache<K>>();
  auto& cache = getCache<MemoCache<K>>(base);
  // Materialize a full Key from the proxy for storage.
  cache.insert_or_assign(K{header, keys}, TVWrapper{val});
}
791 ////////////////////////////////////////////////////////////
// Getter and setter implementations. These all just delegate to the above
// getter/setter helpers, instantiated on the appropriate key and key-proxy
// type.

// Fixed key count, key types known statically (non-shared cache).
template <bool... IsStr>
static const TypedValue* memoCacheGet(const MemoCacheBase* base,
                                      const TypedValue* keys) {
  return getImpl<FixedKey<sizeof...(IsStr)>>(
    base,
    typename FixedKey<sizeof...(IsStr)>::Header{},
    KeyProxyWithTypes<IsStr...>{keys}
  );
}

// Fixed key count, key types checked at runtime (non-shared cache).
template <int N>
static const TypedValue* memoCacheGetGenericKeys(const MemoCacheBase* base,
                                                 const TypedValue* keys) {
  return getImpl<FixedKey<N>>(
    base,
    typename FixedKey<N>::Header{},
    KeyProxy{keys}
  );
}

// Fixed key count, static key types; shared cache, so the FuncId is part of
// the key.
template <bool... IsStr>
static const TypedValue* memoCacheGetShared(const MemoCacheBase* base,
                                            FuncId funcId,
                                            const TypedValue* keys) {
  return getImpl<FixedFuncIdKey<sizeof...(IsStr)>>(
    base,
    typename FixedFuncIdKey<sizeof...(IsStr)>::Header{funcId},
    KeyProxyWithTypes<IsStr...>{keys}
  );
}

// Fixed key count, runtime key types; shared cache.
template <int N>
static const TypedValue* memoCacheGetSharedGenericKeys(const MemoCacheBase* base,
                                                       FuncId funcId,
                                                       const TypedValue* keys) {
  return getImpl<FixedFuncIdKey<N>>(
    base,
    typename FixedFuncIdKey<N>::Header{funcId},
    KeyProxy{keys}
  );
}

// Fully generic: key count and types only known at runtime.
const TypedValue* memoCacheGetGeneric(MemoCacheBase* base,
                                      GenericMemoId::Param id,
                                      const TypedValue* keys) {
  return getImpl<UnboundKey>(
    base,
    UnboundKey::Header{id},
    KeyProxy{keys}
  );
}

// Shared-only cache: the key is a single pre-hashed value, so no key
// machinery is needed.
const TypedValue* memoCacheGetSharedOnly(const MemoCacheBase* base,
                                         SharedOnlyKey key) {
  auto const& cache = getCache<SharedOnlyMemoCache>(base);
  auto const it = cache.find(key);
  if (it == cache.end()) return nullptr;
  assertx(tvIsPlausible(it->second.value));
  assertx(it->second.value.m_type != KindOfUninit);
  return &it->second.value;
}
// Setters, mirroring the getters above; all delegate to setImpl.

// Fixed key count, key types known statically (non-shared cache).
template <bool... IsStr>
static void memoCacheSet(MemoCacheBase*& base,
                         const TypedValue* keys,
                         TypedValue val) {
  setImpl<FixedKey<sizeof...(IsStr)>>(
    base,
    typename FixedKey<sizeof...(IsStr)>::Header{},
    KeyProxyWithTypes<IsStr...>{keys},
    val
  );
}

// Fixed key count, key types checked at runtime (non-shared cache).
template <int N>
static void memoCacheSetGenericKeys(MemoCacheBase*& base,
                                    const TypedValue* keys,
                                    TypedValue val) {
  setImpl<FixedKey<N>>(
    base,
    typename FixedKey<N>::Header{},
    KeyProxy{keys},
    val
  );
}

// Fixed key count, static key types; shared cache (FuncId in the key).
template <bool... IsStr>
static void memoCacheSetShared(MemoCacheBase*& base,
                               FuncId funcId,
                               const TypedValue* keys,
                               TypedValue val) {
  setImpl<FixedFuncIdKey<sizeof...(IsStr)>>(
    base,
    typename FixedFuncIdKey<sizeof...(IsStr)>::Header{funcId},
    KeyProxyWithTypes<IsStr...>{keys},
    val
  );
}

// Fixed key count, runtime key types; shared cache.
template <int N>
static void memoCacheSetSharedGenericKeys(MemoCacheBase*& base,
                                          FuncId funcId,
                                          const TypedValue* keys,
                                          TypedValue val) {
  setImpl<FixedFuncIdKey<N>>(
    base,
    typename FixedFuncIdKey<N>::Header{funcId},
    KeyProxy{keys},
    val
  );
}

// Fully generic: key count and types only known at runtime.
void memoCacheSetGeneric(MemoCacheBase*& base,
                         GenericMemoId::Param id,
                         const TypedValue* keys,
                         TypedValue val) {
  setImpl<UnboundKey>(
    base,
    UnboundKey::Header{id},
    KeyProxy{keys},
    val
  );
}

// Shared-only cache: single pre-hashed key, no key machinery. Lazily creates
// the cache on first use (published back through the `base` reference).
void memoCacheSetSharedOnly(MemoCacheBase*& base,
                            SharedOnlyKey key,
                            TypedValue val) {
  assertx(tvIsPlausible(val));
  assertx(val.m_type != KindOfUninit);
  if (!base) base = req::make_raw<SharedOnlyMemoCache>();
  auto& cache = getCache<SharedOnlyMemoCache>(base);
  cache.insert_or_assign(key, TVWrapper{val});
}
931 ////////////////////////////////////////////////////////////
namespace {

// We'll generate specialized getters and setters for every possible combination
// of key types up to this limit. This causes an exponential blow up in
// functions (and in compile-time) so we want to be careful about increasing it.
static constexpr size_t kMemoCacheMaxSpecializedKeyTypes = 4;
static_assert(
  kMemoCacheMaxSpecializedKeyTypes <= kMemoCacheMaxSpecializedKeys,
  "kMemoCacheMaxSpecializedKeyTypes cannot be greater than "
  "kMemoCacheMaxSpecializedKeys"
);

// Use a macro to generate a "builder" class, which is responsible for taking
// key types and key counts, and returning the appropriate getter or setter.
//
// FromTypes converts the runtime bool array of key types, one element at a
// time, into a compile-time bool pack, ultimately yielding the fully
// specialized function. Counts above kMemoCacheMaxSpecializedKeyTypes fall
// back to the runtime-typed ("GenericKeys") variants, and a count that
// reaches the <0> base case yields nullptr.
#define O(Type, Name, Func) \
template <int, typename = void> struct Name##Builder; \
template <int M> \
struct Name##Builder<M, \
                     std::enable_if_t< \
                       (M <= kMemoCacheMaxSpecializedKeyTypes)>> { \
  template <int N, bool... IsStr> \
  struct FromTypes { \
    static Type get(const bool* types, size_t count) { \
      if (types[count - N]) { \
        return FromTypes<N-1, IsStr..., true>::get(types, count); \
      } else { \
        return FromTypes<N-1, IsStr..., false>::get(types, count); \
      } \
    } \
  }; \
  template <bool... IsStr> \
  struct FromTypes<0, IsStr...> { \
    static Type get(const bool*, size_t) { \
      return Func<IsStr...>; \
    } \
  }; \
  static Type get(const bool* types, size_t count) { \
    if (count == M) return FromTypes<M>::get(types, count); \
    return Name##Builder<M-1>::get(types, count); \
  } \
  static Type get(size_t count) { \
    if (count == M) return Func##GenericKeys<M>; \
    return Name##Builder<M-1>::get(count); \
  } \
}; \
template <int M> \
struct Name##Builder<M, \
                     std::enable_if_t< \
                       (M > kMemoCacheMaxSpecializedKeyTypes)>> { \
  static Type get(const bool* types, size_t count) { \
    if (count == M) return Func##GenericKeys<M>; \
    return Name##Builder<M-1>::get(types, count); \
  } \
  static Type get(size_t count) { \
    if (count == M) return Func##GenericKeys<M>; \
    return Name##Builder<M-1>::get(count); \
  } \
}; \
template <> struct Name##Builder<0> { \
  static Type get(const bool*, size_t) { return nullptr; } \
  static Type get(size_t) { return nullptr; } \
};

// Actually create the builders
O(MemoCacheGetter, MemoCacheGet, memoCacheGet);
O(MemoCacheSetter, MemoCacheSet, memoCacheSet);
O(SharedMemoCacheGetter, MemoCacheGetShared, memoCacheGetShared);
O(SharedMemoCacheSetter, MemoCacheSetShared, memoCacheSetShared);

#undef O

}
// Public entry points: map a runtime (types, count) description onto the
// statically generated getter/setter instantiations above. The builders
// return nullptr when the count reaches the zero base case.

MemoCacheGetter memoCacheGetForKeyTypes(const bool* types, size_t count) {
  return MemoCacheGetBuilder<kMemoCacheMaxSpecializedKeys>::get(types, count);
}
MemoCacheGetter memoCacheGetForKeyCount(size_t count) {
  return MemoCacheGetBuilder<kMemoCacheMaxSpecializedKeys>::get(count);
}

MemoCacheSetter memoCacheSetForKeyTypes(const bool* types, size_t count) {
  return MemoCacheSetBuilder<kMemoCacheMaxSpecializedKeys>::get(types, count);
}
MemoCacheSetter memoCacheSetForKeyCount(size_t count) {
  return MemoCacheSetBuilder<kMemoCacheMaxSpecializedKeys>::get(count);
}

SharedMemoCacheGetter sharedMemoCacheGetForKeyTypes(const bool* types,
                                                    size_t count) {
  return MemoCacheGetSharedBuilder<kMemoCacheMaxSpecializedKeys>::get(
    types, count
  );
}
SharedMemoCacheGetter sharedMemoCacheGetForKeyCount(size_t count) {
  return MemoCacheGetSharedBuilder<kMemoCacheMaxSpecializedKeys>::get(count);
}

SharedMemoCacheSetter sharedMemoCacheSetForKeyTypes(const bool* types,
                                                    size_t count) {
  return MemoCacheSetSharedBuilder<kMemoCacheMaxSpecializedKeys>::get(
    types, count
  );
}
SharedMemoCacheSetter sharedMemoCacheSetForKeyCount(size_t count) {
  return MemoCacheSetSharedBuilder<kMemoCacheMaxSpecializedKeys>::get(count);
}