+----------------------------------------------------------------------+
| HipHop for PHP                                                       |
+----------------------------------------------------------------------+
| Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
+----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "hphp/runtime/vm/jit/prof-data.h"
22 #include <folly/MapUtil.h>
24 #include "hphp/util/logger.h"
26 #include "hphp/runtime/vm/jit/normalized-instruction.h"
27 #include "hphp/runtime/vm/jit/region-selection.h"
28 #include "hphp/runtime/vm/jit/translator.h"
29 #include "hphp/runtime/vm/treadmill.h"
30 #include "hphp/runtime/vm/verifier/cfg.h"
32 namespace HPHP
{ namespace jit
{
36 ////////////////////////////////////////////////////////////////////////////////
38 ProfTransRec::ProfTransRec(SrcKey lastSk
, SrcKey sk
, RegionDescPtr region
,
40 : m_kind(TransKind::Profile
)
45 assertx(region
!= nullptr && !region
->empty() && region
->start() == sk
);
48 ProfTransRec::ProfTransRec(SrcKey sk
, int nArgs
, uint32_t asmSize
)
49 : m_kind(TransKind::ProfPrologue
)
51 , m_prologueArgs(nArgs
)
55 m_callers
= std::make_unique
<CallerRec
>();
58 ProfTransRec::~ProfTransRec() {
59 if (m_kind
== TransKind::Profile
) {
60 m_region
.~RegionDescPtr();
63 assertx(m_kind
== TransKind::ProfPrologue
);
64 m_callers
.~CallerRecPtr();
67 ////////////////////////////////////////////////////////////////////////////////
69 template<typename Map
>
70 typename
Map::Config
makeAHMConfig() {
71 typename
Map::Config config
;
72 config
.growthFactor
= 1;
73 config
.entryCountThreadCacheSize
= 10;
78 : m_counters(RuntimeOption::ServerExecutionMode()
79 ? std::numeric_limits
<int64_t>::max()
80 : RuntimeOption::EvalJitPGOThreshold
)
81 , m_profilingFuncs(RuntimeOption::EvalPGOFuncCountHint
,
82 makeAHMConfig
<decltype(m_profilingFuncs
)>())
83 , m_optimizedSKs(RuntimeOption::EvalPGOFuncCountHint
,
84 makeAHMConfig
<decltype(m_optimizedSKs
)>())
85 , m_proflogueDB(RuntimeOption::EvalPGOFuncCountHint
* 2,
86 makeAHMConfig
<decltype(m_proflogueDB
)>())
87 , m_dvFuncletDB(RuntimeOption::EvalPGOFuncCountHint
* 2,
88 makeAHMConfig
<decltype(m_dvFuncletDB
)>())
89 , m_jmpToTransID(RuntimeOption::EvalPGOFuncCountHint
* 10,
90 makeAHMConfig
<decltype(m_jmpToTransID
)>())
91 , m_blockEndOffsets(RuntimeOption::EvalPGOFuncCountHint
,
92 makeAHMConfig
<decltype(m_blockEndOffsets
)>())
95 TransID
ProfData::allocTransID() {
96 folly::SharedMutex::WriteHolder lock
{m_transLock
};
97 m_transRecs
.emplace_back();
98 return m_transRecs
.size() - 1;
101 TransID
ProfData::proflogueTransId(const Func
* func
, int nArgs
) const {
102 auto const numParams
= func
->numNonVariadicParams();
103 if (nArgs
> numParams
) nArgs
= numParams
+ 1;
105 return folly::get_default(
107 PrologueID
{func
->getFuncId(), nArgs
},
112 TransID
ProfData::dvFuncletTransId(SrcKey sk
) const {
113 return folly::get_default(
120 void ProfData::addTransProfile(TransID transID
,
121 const RegionDescPtr
& region
,
122 const PostConditions
& pconds
,
124 auto const lastSk
= region
->lastSrcKey();
127 DEBUG_ONLY
auto const nBlocks
= region
->blocks().size();
128 assertx(nBlocks
== 1);
129 region
->renumberBlock(region
->entry()->id(), transID
);
130 for (auto& b
: region
->blocks()) b
->setProfTransID(transID
);
131 region
->blocks().back()->setPostConds(pconds
);
132 auto const startSk
= region
->start();
134 // If the translation corresponds to a DV Funclet, then add an entry
136 auto const func
= startSk
.func();
137 auto const funcId
= func
->getFuncId();
138 auto const bcOffset
= startSk
.offset();
140 if (func
->isDVEntry(bcOffset
)) {
141 // Normal DV funclets don't have type guards, and thus have a single
142 // translation. However, some special functions written in hhas
143 // (e.g. array_map) have complex DV funclets that get retranslated for
144 // different types. For those functions, m_dvFuncletDB keeps the TransID
145 // for their first translation.
146 m_dvFuncletDB
.emplace(startSk
.toAtomicInt(), transID
);
150 folly::SharedMutex::WriteHolder lock
{m_transLock
};
151 m_transRecs
[transID
].reset(new ProfTransRec(lastSk
, startSk
, region
,
155 // Putting transID in m_funcProfTrans makes it visible to other threads, so
156 // this has to happen after we've already put its metadata in m_transRecs.
157 folly::SharedMutex::WriteHolder lock
{m_funcProfTransLock
};
158 m_funcProfTrans
[funcId
].push_back(transID
);
161 void ProfData::addTransProfPrologue(TransID transID
, SrcKey sk
, int nArgs
,
163 m_proflogueDB
.emplace(PrologueID
{sk
.funcID(), nArgs
}, transID
);
165 folly::SharedMutex::WriteHolder lock
{m_transLock
};
166 m_transRecs
[transID
].reset(new ProfTransRec(sk
, nArgs
, asmSize
));
169 void ProfData::addProfTrans(TransID transID
,
170 std::unique_ptr
<ProfTransRec
> ptr
) {
171 assertx(transID
>= m_transRecs
.size());
172 if (transID
> m_transRecs
.size()) m_transRecs
.resize(transID
);
173 auto const sk
= ptr
->srcKey();
174 if (ptr
->kind() == TransKind::Profile
) {
175 if (sk
.func()->isDVEntry(sk
.offset())) {
176 m_dvFuncletDB
.emplace(sk
.toAtomicInt(), transID
);
178 m_funcProfTrans
[sk
.funcID()].push_back(transID
);
180 m_proflogueDB
.emplace(PrologueID
{sk
.funcID(), ptr
->prologueArgs()},
183 m_transRecs
.emplace_back(std::move(ptr
));
186 bool ProfData::anyBlockEndsAt(const Func
* func
, Offset offset
) {
187 auto it
= m_blockEndOffsets
.find(func
->getFuncId().toInt());
188 if (it
== m_blockEndOffsets
.end()) {
190 Verifier::GraphBuilder builder
{arena
, func
};
191 auto cfg
= builder
.build();
192 jit::fast_set
<Offset
> offsets
;
194 for (auto blocks
= linearBlocks(cfg
); !blocks
.empty(); ) {
195 auto last
= blocks
.popFront()->last
- func
->entry();
196 offsets
.insert(last
);
199 it
= m_blockEndOffsets
.emplace(func
->getFuncId().toInt(),
200 std::move(offsets
)).first
;
203 return it
->second
.count(offset
);
206 ////////////////////////////////////////////////////////////////////////////////
209 std::atomic
<ProfData
*> s_profData
{nullptr};
210 struct ProfDataShutdownDeleter
{
211 ~ProfDataShutdownDeleter() {
212 delete s_profData
.load();
214 } s_profDataShutdownDeleter
;
/*
 * Used to free ProfData from the Treadmill.
 */
219 struct ProfDataTreadmillDeleter
{
221 if (RuntimeOption::ServerExecutionMode()) {
222 Logger::Info("Deleting JIT ProfData");
226 std::unique_ptr
<ProfData
> data
;
230 std::atomic_bool
ProfData::s_triedDeserialization
{false};
231 std::atomic_bool
ProfData::s_wasDeserialized
{false};
232 std::atomic
<StringData
*> ProfData::s_buildHost
{nullptr};
233 std::atomic
<StringData
*> ProfData::s_tag
{nullptr};
234 std::atomic
<int64_t> ProfData::s_buildTime
{0};
236 RDS_LOCAL_NO_CHECK(ProfData
*, rl_profData
)(nullptr);
238 void processInitProfData() {
239 if (!RuntimeOption::EvalJitPGO
) return;
241 s_profData
.store(new ProfData(), std::memory_order_relaxed
);
244 void requestInitProfData() {
245 *rl_profData
= s_profData
.load(std::memory_order_relaxed
);
248 void requestExitProfData() {
249 *rl_profData
= nullptr;
252 const ProfData
* globalProfData() {
253 return s_profData
.load(std::memory_order_relaxed
);
256 void discardProfData() {
257 if (s_profData
.load(std::memory_order_relaxed
) == nullptr) return;
259 // Make sure s_profData is nullptr so any new requests won't try to use the
260 // object we're deleting, then send it to the Treadmill for deletion.
261 std::unique_ptr
<ProfData
> data
{
262 s_profData
.exchange(nullptr, std::memory_order_relaxed
)
264 if (data
!= nullptr) {
265 if (RuntimeOption::ServerExecutionMode()) {
266 Logger::Info("Putting JIT ProfData on Treadmill");
268 Treadmill::enqueue(ProfDataTreadmillDeleter
{std::move(data
)});
272 void ProfData::maybeResetCounters() {
273 if (m_countersReset
.load(std::memory_order_acquire
)) return;
274 if (requestCount() < RuntimeOption::EvalJitResetProfCountersRequest
) return;
276 folly::SharedMutex::WriteHolder lock
{m_transLock
};
277 if (m_countersReset
.load(std::memory_order_relaxed
)) return;
278 m_counters
.resetAllCounters(RuntimeOption::EvalJitPGOThreshold
);
279 m_countersReset
.store(true, std::memory_order_release
);
282 void ProfData::addTargetProfile(const ProfData::TargetProfileInfo
& info
) {
283 folly::SharedMutex::WriteHolder lock
{m_targetProfilesLock
};
284 m_targetProfiles
[info
.key
.transId
].push_back(info
);
287 std::vector
<ProfData::TargetProfileInfo
> ProfData::getTargetProfiles(
288 TransID transID
) const {
289 folly::SharedMutex::ReadHolder lock
{m_targetProfilesLock
};
290 auto it
= m_targetProfiles
.find(transID
);
291 if (it
!= m_targetProfiles
.end()) {
294 return std::vector
<TargetProfileInfo
>{};
298 ////////////////////////////////////////////////////////////////////////////////