Use makePseudoMainExit in more places
[hiphop-php.git] hphp/runtime/vm/jit/hhbc-translator.cpp
blob 0bda91e29c531c91aa106e388b1403fb6035dd53
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
15 */
16 #include "hphp/runtime/vm/jit/hhbc-translator.h"
17 #include <algorithm>
18 #include <vector>
20 #include "folly/CpuId.h"
21 #include "folly/Optional.h"
23 #include "hphp/util/trace.h"
24 #include "hphp/runtime/ext/ext_closure.h"
25 #include "hphp/runtime/ext/ext_generator.h"
26 #include "hphp/runtime/ext/asio/wait_handle.h"
27 #include "hphp/runtime/ext/asio/async_function_wait_handle.h"
28 #include "hphp/runtime/base/stats.h"
29 #include "hphp/runtime/vm/repo.h"
30 #include "hphp/runtime/vm/repo-global-data.h"
31 #include "hphp/runtime/vm/unit.h"
32 #include "hphp/runtime/vm/instance-bits.h"
33 #include "hphp/runtime/vm/runtime.h"
34 #include "hphp/runtime/vm/jit/code-gen-helpers.h"
35 #include "hphp/runtime/vm/jit/ir-unit.h"
36 #include "hphp/runtime/vm/jit/normalized-instruction.h"
37 #include "hphp/runtime/vm/jit/translator-inline.h"
38 #include "hphp/runtime/vm/jit/mc-generator.h"
39 #include "hphp/runtime/vm/jit/target-profile.h"
41 // Include last to localize effects to this file
42 #include "hphp/util/assert-throw.h"
44 namespace HPHP { namespace JIT {
46 TRACE_SET_MOD(hhir);
48 //////////////////////////////////////////////////////////////////////
50 namespace {
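// Helpers for the "unique class" checks below: a Class only counts as unique
// in RepoAuthoritative mode and when it carries AttrUnique, i.e. its name can
// never resolve to a different class at runtime.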
52 bool classIsUnique(const Class* cls) {
53 return RuntimeOption::RepoAuthoritative &&
54 cls &&
55 (cls->attrs() & AttrUnique);
58 bool classIsUniqueNormalClass(const Class* cls) {
59 return classIsUnique(cls) &&
60 !(cls->attrs() & (AttrInterface | AttrTrait));
63 bool classIsUniqueInterface(const Class* cls) {
64 return classIsUnique(cls) &&
65 (cls->attrs() & AttrInterface);
70 //////////////////////////////////////////////////////////////////////
72 HhbcTranslator::HhbcTranslator(TransContext context)
73 : m_context(context)
74 , m_unit(context)
75 , m_irb(new IRBuilder(context.initSpOffset, m_unit, context.func))
76 , m_bcStateStack { BcState(context.initBcOffset,
77 context.resumed,
78 context.func) }
79 , m_lastBcOff{false}
80 , m_hasExit{false}
81 , m_mode{IRGenMode::Trace}
83 updateMarker();
84 auto const fp = gen(DefFP);
85 gen(DefSP, StackOffset{context.initSpOffset}, fp);
88 void HhbcTranslator::setGenMode(IRGenMode mode) {
89 m_mode = mode;
92 bool HhbcTranslator::classIsUniqueOrCtxParent(const Class* cls) const {
93 if (!cls) return false;
94 if (classIsUnique(cls)) return true;
95 if (!curClass()) return false;
96 return curClass()->classof(cls);
99 bool HhbcTranslator::classIsPersistentOrCtxParent(const Class* cls) const {
100 if (!cls) return false;
101 if (classHasPersistentRDS(cls)) return true;
102 if (!curClass()) return false;
103 return curClass()->classof(cls);
106 ArrayData* HhbcTranslator::lookupArrayId(int arrId) {
107 return curUnit()->lookupArrayId(arrId);
110 StringData* HhbcTranslator::lookupStringId(int strId) {
111 return curUnit()->lookupLitstrId(strId);
114 Func* HhbcTranslator::lookupFuncId(int funcId) {
115 return curUnit()->lookupFuncId(funcId);
118 PreClass* HhbcTranslator::lookupPreClassId(int preClassId) {
119 return curUnit()->lookupPreClassId(preClassId);
122 const NamedEntityPair& HhbcTranslator::lookupNamedEntityPairId(int id) {
123 return curUnit()->lookupNamedEntityPairId(id);
126 const NamedEntity* HhbcTranslator::lookupNamedEntityId(int id) {
127 return curUnit()->lookupNamedEntityId(id);
130 SSATmp* HhbcTranslator::push(SSATmp* tmp) {
131 assert(tmp);
132 FTRACE(2, "HhbcTranslator pushing {}\n", *tmp->inst());
133 m_irb->evalStack().push(tmp);
134 return tmp;
137 SSATmp* HhbcTranslator::pushIncRef(SSATmp* tmp, TypeConstraint tc) {
138 m_irb->constrainValue(tmp, tc);
139 gen(IncRef, tmp);
140 return push(tmp);
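// pop: take the top value off the eval stack if one is buffered there;
// otherwise the value only exists in memory, so emit an LdStack at the
// current stack deficit and bump the deficit so later pops read deeper slots.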
143 SSATmp* HhbcTranslator::pop(Type type, TypeConstraint tc) {
144 SSATmp* opnd = m_irb->evalStack().pop();
145 m_irb->constrainValue(opnd, tc);
147 if (opnd == nullptr) {
148 uint32_t stackOff = m_irb->stackDeficit();
149 m_irb->incStackDeficit();
150 m_irb->constrainStack(stackOff, tc);
151 auto value = gen(LdStack, type, StackOffset(stackOff), m_irb->sp());
152 FTRACE(2, "HhbcTranslator popping {}\n", *value->inst());
153 return value;
156 FTRACE(2, "HhbcTranslator popping {}\n", *opnd->inst());
157 return opnd;
160 void HhbcTranslator::discard(unsigned n) {
161 for (unsigned i = 0; i < n; ++i) {
162 pop(Type::StackElem, DataTypeGeneric); // don't care about the values
166 // type is the type expected on the stack.
167 void HhbcTranslator::popDecRef(Type type, TypeConstraint tc) {
168 if (SSATmp* src = m_irb->evalStack().pop()) {
169 m_irb->constrainValue(src, tc);
170 gen(DecRef, src);
171 return;
174 m_irb->constrainStack(m_irb->stackDeficit(), tc);
175 gen(DecRefStack, StackOffset(m_irb->stackDeficit()), type, m_irb->sp());
176 m_irb->incStackDeficit();
179 // We don't know what type description to expect for the stack
180 // locations before index, so we use a generic type when popping the
181 // intermediate values. If it ends up creating a new LdStack,
182 // refineType during a later pop() or top() will fix up the type to
183 // the known type.
184 void HhbcTranslator::extendStack(uint32_t index, Type type) {
185   // DataTypeGeneric is used here because nobody's actually looking at the
186   // values; we're just inserting LdStacks into the eval stack to be consumed
187 // elsewhere.
188 if (index == 0) {
189 push(pop(type, DataTypeGeneric));
190 return;
193 SSATmp* tmp = pop(Type::StackElem, DataTypeGeneric);
194 extendStack(index - 1, type);
195 push(tmp);
198 SSATmp* HhbcTranslator::top(TypeConstraint tc, uint32_t index) const {
199 SSATmp* tmp = m_irb->evalStack().top(index);
200 if (!tmp) return nullptr;
201 m_irb->constrainValue(tmp, tc);
202 return tmp;
205 SSATmp* HhbcTranslator::top(Type type, uint32_t index,
206 TypeConstraint constraint) {
207 SSATmp* tmp = top(constraint, index);
208 if (!tmp) {
209 extendStack(index, type);
210 tmp = top(constraint, index);
212 assert(tmp);
213 return tmp;
216 void HhbcTranslator::replace(uint32_t index, SSATmp* tmp) {
217 m_irb->evalStack().replace(index, tmp);
220 Type HhbcTranslator::topType(uint32_t idx, TypeConstraint constraint) const {
221 FTRACE(5, "Asking for type of stack elem {}\n", idx);
222 if (idx < m_irb->evalStack().size()) {
223 return top(constraint, idx)->type();
224 } else {
225 auto absIdx = idx - m_irb->evalStack().size() + m_irb->stackDeficit();
226 auto stkVal = getStackValue(m_irb->sp(), absIdx);
227 m_irb->constrainStack(absIdx, constraint);
228 return stkVal.knownType;
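// Logical stack depth as the bytecode sees it: the IRBuilder's stack offset,
// plus values still buffered in the eval stack, minus cells already logically
// popped (the stack deficit).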
232 size_t HhbcTranslator::spOffset() const {
233 return m_irb->spOffset() + m_irb->evalStack().size() - m_irb->stackDeficit();
237 * When doing gen-time inlining, we set up a series of IR instructions
238 * that looks like this:
240 * fp0 = DefFP
241 * sp0 = DefSP<offset>
243 * // ... normal stuff happens ...
244 * // sp_pre = some SpillStack, or maybe the DefSP
246 * // FPI region:
247 * sp1 = SpillStack sp_pre, ...
248 * sp2 = SpillFrame sp1, ...
249 * // ... possibly more spillstacks due to argument expressions
250 * sp3 = SpillStack sp2, -argCount
251 * fp2 = DefInlineFP<func,retBC,retSP> sp2 sp1
252 * sp4 = ReDefSP<spOffset,spansCall> sp1 fp2
254 * // ... callee body ...
256 * = InlineReturn fp2
258 * [ sp5 = ReDefSP<spOffset,spansCall> sp1 fp0 ]
260 * The rest of the code then depends on sp5, and not any of the StkPtr
261 * tree going through the callee body. The sp5 tmp has the same view
262 * of the stack as sp1 did, which represents what the stack looks like
263 * before the return address is pushed but after the activation record
264 * is popped.
266 * In DCE we attempt to remove the SpillFrame, InlineReturn, and
267 * DefInlineFP instructions if they aren't needed.
269 * ReDefSP takes sp1, the stack pointer from before the inlined frame.
270 * This SSATmp may be used for determining stack types in the
271 * simplifier, or stack values if the inlined body doesn't contain a
272 * call---these instructions both take an extradata `spansCall' which
273  * is true iff a Call occurred anywhere between the definition of
274 * its first argument and itself.
276 void HhbcTranslator::beginInlining(unsigned numParams,
277 const Func* target,
278 Offset returnBcOffset,
279 Type retTypePred) {
280 assert(!m_fpiStack.empty() &&
281 "Inlining does not support calls with the FPush* in a different Tracelet");
282 assert(returnBcOffset >= 0 && "returnBcOffset before beginning of caller");
283 assert(curFunc()->base() + returnBcOffset < curFunc()->past() &&
284 "returnBcOffset past end of caller");
286 FTRACE(1, "[[[ begin inlining: {}\n", target->fullName()->data());
288 SSATmp* params[numParams];
289 for (unsigned i = 0; i < numParams; ++i) {
290 params[numParams - i - 1] = popF();
293 auto const prevSP = m_fpiStack.top().first;
294 auto const prevSPOff = m_fpiStack.top().second;
295 auto const calleeSP = spillStack();
297 DefInlineFPData data;
298 data.target = target;
299 data.retBCOff = returnBcOffset;
300 data.retSPOff = prevSPOff;
301 data.retTypePred = retTypePred;
303 // Push state and update the marker before emitting any instructions so
304 // they're all given markers in the callee.
305 m_bcStateStack.emplace_back(target->getEntryForNumArgs(numParams),
306 false, target);
307 updateMarker();
309 always_assert_log(
310 findSpillFrame(calleeSP),
311 [&] {
312 return folly::format("Couldn't find SpillFrame for inlined call on sp {}."
313 " Was the FPush instruction interpreted?\n{}",
314 *calleeSP->inst(), m_irb->unit().toString()).str();
317 auto const calleeFP = gen(DefInlineFP, data, calleeSP, prevSP, m_irb->fp());
318 gen(
319 ReDefSP,
320 ReDefSPData {
321 target->numLocals(),
322 false /* spansCall; calls in FPI regions are not inline
323 * candidates currently */
325 m_irb->sp(),
326 m_irb->fp()
329 profileFunctionEntry("Inline");
331 for (unsigned i = 0; i < numParams; ++i) {
332 genStLocal(i, calleeFP, params[i]);
334 for (unsigned i = numParams; i < target->numLocals(); ++i) {
336 * Here we need to be generating hopefully-dead stores to
337 * initialize non-parameter locals to KindOfUninit in case we have
338 * to leave the trace.
340 genStLocal(i, calleeFP, cns(Type::Uninit));
343 m_fpiActiveStack.push(std::move(m_fpiStack.top()));
344 m_fpiStack.pop();
347 bool HhbcTranslator::isInlining() const {
348 return m_bcStateStack.size() > 1;
351 int HhbcTranslator::inliningDepth() const {
352 return m_bcStateStack.size() - 1;
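// A BCMarker ties each emitted IR instruction back to its bytecode source
// (SrcKey), the logical stack offset at that point, and the profiling
// translation it belongs to.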
355 BCMarker HhbcTranslator::makeMarker(Offset bcOff) {
356 int32_t stackOff = m_irb->spOffset() +
357 m_irb->evalStack().numCells() - m_irb->stackDeficit();
359 FTRACE(2, "makeMarker: bc {} sp {} fn {}\n",
360 bcOff, stackOff, curFunc()->fullName()->data());
362 return BCMarker {
363 SrcKey { curFunc(), bcOff, resumed() },
364 stackOff,
365 m_profTransID
369 void HhbcTranslator::updateMarker() {
370 m_irb->setMarker(makeMarker(bcOff()));
373 void HhbcTranslator::profileFunctionEntry(const char* category) {
374 static const bool enabled = Stats::enabledAny() &&
375 getenv("HHVM_STATS_FUNCENTRY");
376 if (!enabled) return;
378 gen(
379 IncStatGrouped,
380 cns(makeStaticString("FunctionEntry")),
381 cns(makeStaticString(category)),
382 cns(1)
386 void HhbcTranslator::profileInlineFunctionShape(const std::string& str) {
387 gen(
388 IncStatGrouped,
389 cns(makeStaticString("InlineShape")),
390 cns(makeStaticString(str)),
391 cns(1)
395 void HhbcTranslator::profileSmallFunctionShape(const std::string& str) {
396 gen(
397 IncStatGrouped,
398 cns(makeStaticString("SmallFunctions")),
399 cns(makeStaticString(str)),
400 cns(1)
404 void HhbcTranslator::profileFailedInlShape(const std::string& str) {
405 gen(
406 IncStatGrouped,
407 cns(makeStaticString("FailedInl")),
408 cns(makeStaticString(str)),
409 cns(1)
413 void HhbcTranslator::setProfTransID(TransID id) {
414 m_profTransID = id;
417 void HhbcTranslator::setBcOff(Offset newOff, bool lastBcOff) {
418 always_assert_log(
419 IMPLIES(isInlining(), !lastBcOff),
420 [&] {
421 return folly::format("Tried to end trace while inlining:\n{}",
422 unit()).str();
426 m_bcStateStack.back().bcOff = newOff;
427 updateMarker();
428 m_lastBcOff = lastBcOff;
431 void HhbcTranslator::emitPrint() {
432 Type type = topC()->type();
433 if (type.subtypeOfAny(Type::Int, Type::Bool, Type::Null, Type::Str)) {
434 auto const cell = popC();
436 Opcode op;
437 if (type <= Type::Str) {
438 op = PrintStr;
439 } else if (type <= Type::Int) {
440 op = PrintInt;
441 } else if (type <= Type::Bool) {
442 op = PrintBool;
443 } else {
444 assert(type <= Type::Null);
445 op = Nop;
447     // the print helpers decref their arg, so don't decref the popped value
448 if (op != Nop) {
449 gen(op, cell);
451 push(cns(1));
452 } else {
453 emitInterpOne(Type::Int, 1);
457 void HhbcTranslator::emitUnboxRAux() {
458 Block* exit = makeExit();
459 SSATmp* srcBox = popR();
460 SSATmp* unboxed = unbox(srcBox, exit);
461 if (unboxed == srcBox) {
462 // If the Unbox ended up being a noop, don't bother refcounting
463 push(unboxed);
464 } else {
465 pushIncRef(unboxed);
466 gen(DecRef, srcBox);
470 void HhbcTranslator::emitUnboxR() {
471 emitUnboxRAux();
474 void HhbcTranslator::emitUnbox() {
475 Block* exit = makeExit();
476 SSATmp* srcBox = popV();
477 SSATmp* unboxed = unbox(srcBox, exit);
478 pushIncRef(unboxed);
479 gen(DecRef, srcBox);
482 void HhbcTranslator::emitThis() {
483 pushIncRef(gen(LdThis, makeExitNullThis(), m_irb->fp()));
486 void HhbcTranslator::emitCheckThis() {
487 gen(LdThis, makeExitNullThis(), m_irb->fp());
490 void HhbcTranslator::emitRB(Trace::RingBufferType t, SrcKey sk, int level) {
491 if (!Trace::moduleEnabledRelease(Trace::ringbuffer, level)) return;
493 gen(RBTrace, RBTraceData(t, sk));
496 void HhbcTranslator::emitRB(Trace::RingBufferType t, const StringData* msg,
497 int level) {
498 if (!Trace::moduleEnabledRelease(Trace::ringbuffer, level)) return;
500 gen(RBTrace, RBTraceData(t, msg));
503 void HhbcTranslator::emitDbgAssertRetAddr() {
504 gen(DbgAssertRetAddr);
507 void HhbcTranslator::emitBareThis(int notice) {
508   // We just exit the trace when $this is null. Before exiting
509 // the trace, we could also push null onto the stack and raise a
510 // notice if the notice argument is set. By exiting the trace when
511 // $this is null, we can be sure in the rest of the trace that we
512   // have the $this object on top of the stack, and we can eliminate
513   // further null checks of $this.
514 if (!curClass()) {
515 emitInterpOne(Type::InitNull, 0); // will raise notice and push null
516 return;
518 if (notice == static_cast<int>(BareThisOp::NeverNull)) {
519 setThisAvailable();
521 pushIncRef(gen(LdThis, makeExitSlow(), m_irb->fp()));
524 void HhbcTranslator::emitArray(int arrayId) {
525 push(cns(lookupArrayId(arrayId)));
528 void HhbcTranslator::emitNewArray(int capacity) {
529 if (capacity == 0) {
530 push(cns(staticEmptyArray()));
531 } else {
532 push(gen(NewArray, cns(capacity)));
536 void HhbcTranslator::emitNewPackedArray(int numArgs) {
537 // The NewPackedArray opcode's helper needs array values passed to it
538 // via the stack. We use spillStack() to flush the eval stack and
539 // obtain a pointer to the topmost item; if over-flushing becomes
540 // a problem then we should refactor the NewPackedArray opcode to
541 // take its values directly as SSA operands.
543 // Before the spillStack() we touch all of the incoming stack
544 // arguments so that they are available to later optimizations via
545 // getStackValue().
546 for (int i = 0; i < numArgs; i++) topC(i, DataTypeGeneric);
547 SSATmp* sp = spillStack();
548 for (int i = 0; i < numArgs; i++) popC(DataTypeGeneric);
549 push(gen(NewPackedArray, cns(numArgs), sp));
552 void HhbcTranslator::emitNewStructArray(uint32_t numArgs, StringData** keys) {
553   // The NewStructArray opcode's helper needs array values passed to it
554   // via the stack. We use spillStack() to flush the eval stack and
555   // obtain a pointer to the topmost item; if over-flushing becomes
556   // a problem then we should refactor the NewStructArray opcode to
557   // take its values directly as SSA operands.
558 SSATmp* sp = spillStack();
559 for (int i = 0; i < numArgs; i++) popC(DataTypeGeneric);
560 NewStructData extra;
561 extra.numKeys = numArgs;
562 extra.keys = new (m_unit.arena()) StringData*[numArgs];
563 memcpy(extra.keys, keys, numArgs * sizeof(*keys));
564 push(gen(NewStructArray, extra, sp));
567 void HhbcTranslator::emitArrayAdd() {
568 if (!topC(0)->isA(Type::Arr) || !topC(1)->isA(Type::Arr)) {
569 // This happens when we have a prior spillstack that optimizes away
570 // its spilled values because they were already on the stack. This
571     // prevents us from getting the type of the SSATmps popped from the
572 // eval stack. Most likely we had an interpone before this
573 // instruction.
574 emitInterpOne(Type::Arr, 2);
575 return;
578 auto catchBlock = makeCatch();
579 SSATmp* tr = popC();
580 SSATmp* tl = popC();
581   // The ArrayAdd helper decrefs its args, so don't decref popped values.
582 push(gen(ArrayAdd, catchBlock, tl, tr));
585 void HhbcTranslator::emitAddElemC() {
586 // This is just to peek at the type; it'll be consumed for real down below and
587 // we don't want to constrain it if we're just going to InterpOne.
588 auto kt = topC(1, DataTypeGeneric)->type();
589 Opcode op;
590 if (kt <= Type::Int) {
591 op = AddElemIntKey;
592 } else if (kt <= Type::Str) {
593 op = AddElemStrKey;
594 } else {
595 emitInterpOne(Type::Arr, 3);
596 return;
599 // val is teleported from the stack to the array, so we don't have to do any
600 // refcounting.
601 auto const val = popC(DataTypeGeneric);
602 auto const key = popC();
603 auto const arr = popC();
604   // The AddElem* instructions decref their args, so don't decref popped
605 // values.
606 push(gen(op, arr, key, val));
609 void HhbcTranslator::emitAddNewElemC() {
610 if (!topC(1)->isA(Type::Arr)) {
611 return emitInterpOne(Type::Arr, 2);
614 auto const val = popC();
615 auto const arr = popC();
616   // The AddNewElem helper decrefs its args, so don't decref popped values.
617 push(gen(AddNewElem, arr, val));
620 void HhbcTranslator::emitNewCol(int type, int size) {
621 push(gen(NewCol, cns(type), cns(size)));
624 void HhbcTranslator::emitClone() {
625 if (!topC()->isA(Type::Obj)) PUNT(Clone-NonObj);
626 auto const catchTrace = makeCatch();
627 auto const obj = popC();
628 push(gen(Clone, catchTrace, obj));
629 gen(DecRef, obj);
632 void HhbcTranslator::emitColAddElemC() {
633 if (!topC(2)->isA(Type::Obj)) {
634 return emitInterpOne(Type::Obj, 3);
636 if (!topC(1, DataTypeGeneric)->type().subtypeOfAny(Type::Int, Type::Str)) {
637 emitInterpOne(Type::Obj, 3);
638 return;
641 auto* catchBlock = makeCatch();
642 auto const val = popC();
643 auto const key = popC();
644 auto const coll = popC();
645 push(gen(ColAddElemC, catchBlock, coll, key, val));
646 gen(DecRef, key);
649 void HhbcTranslator::emitColAddNewElemC() {
650 if (!topC(1)->isA(Type::Obj)) {
651 return emitInterpOne(Type::Obj, 2);
654 auto* catchBlock = makeCatch();
655 auto const val = popC();
656 auto const coll = popC();
657   // The ColAddNewElemC helper decrefs its args, so don't decref popped values.
658 push(gen(ColAddNewElemC, catchBlock, coll, val));
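// Shared implementation of Cns/CnsE/CnsU. Persistent constants with a known
// value are burned in as immediates; otherwise we try LdCns and, on a miss,
// fall back to the matching lookup helper (LookupCnsU when a fallback name is
// given, LookupCnsE when a missing constant is an error, LookupCns otherwise).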
661 void HhbcTranslator::emitCnsCommon(uint32_t id,
662 uint32_t fallback,
663 bool error) {
664 assert(fallback == kInvalidId || !error);
665 StringData* name = curUnit()->lookupLitstrId(id);
666 SSATmp* cnsNameTmp = cns(name);
667 const TypedValue* tv = Unit::lookupPersistentCns(name);
668 SSATmp* result = nullptr;
670 SSATmp* fallbackNameTmp = nullptr;
671 if (fallback != kInvalidId) {
672 StringData* fallbackName = curUnit()->lookupLitstrId(fallback);
673 fallbackNameTmp = cns(fallbackName);
675 if (tv) {
676 if (tv->m_type == KindOfUninit) {
677       // KindOfUninit means a dynamic system constant; it always requires a
678       // slow lookup.
679 assert(!fallbackNameTmp);
680 if (error) {
681 result = gen(LookupCnsE, cnsNameTmp);
682 } else {
683 result = gen(LookupCns, makeCatch(), cnsNameTmp);
685 } else {
686 result = staticTVCns(tv);
688 } else {
689 SSATmp* c1 = gen(LdCns, cnsNameTmp);
690 result = m_irb->cond(
692 [&] (Block* taken) { // branch
693 gen(CheckInit, taken, c1);
695 [&] { // Next: LdCns hit in TC
696 return c1;
698 [&] { // Taken: miss in TC, do lookup & init
699 m_irb->hint(Block::Hint::Unlikely);
700 // We know that c1 is Uninit in this branch but we have to encode this
701 // in the IR.
702 gen(AssertType, Type::Uninit, c1);
704 if (fallbackNameTmp) {
705 return gen(LookupCnsU, makeCatch(),
706 cnsNameTmp, fallbackNameTmp);
708 if (error) {
709 return gen(LookupCnsE, makeCatch(), cnsNameTmp);
711 return gen(LookupCns, makeCatch(), cnsNameTmp);
714 push(result);
717 void HhbcTranslator::emitCns(uint32_t id) {
718 emitCnsCommon(id, kInvalidId, false);
721 void HhbcTranslator::emitCnsE(uint32_t id) {
722 emitCnsCommon(id, kInvalidId, true);
725 void HhbcTranslator::emitCnsU(uint32_t id, uint32_t fallbackId) {
726 emitCnsCommon(id, fallbackId, false);
729 void HhbcTranslator::emitDefCns(uint32_t id) {
730 emitInterpOne(Type::Bool, 1);
733 void HhbcTranslator::emitConcat() {
734 auto const catchBlock = makeCatch();
735 SSATmp* tr = popC();
736 SSATmp* tl = popC();
737 // Concat consumes only first ref, never second
738 push(gen(ConcatCellCell, catchBlock, tl, tr));
739 // so we need to consume second ref ourselves
740 gen(DecRef, tr);
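// ConcatN concatenates the top n cells (n is 2, 3 or 4), e.g. `$a . $b . $c`;
// for n of 3 or 4 we require all-string operands and otherwise punt.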
743 void HhbcTranslator::emitConcatN(int n) {
744 if (n == 2) return emitConcat();
746 auto const catchBlock = makeCatch();
748 SSATmp* t1 = popC();
749 SSATmp* t2 = popC();
750 SSATmp* t3 = popC();
752 if (!t1->isA(Type::Str) ||
753 !t2->isA(Type::Str) ||
754 !t3->isA(Type::Str)) {
755 PUNT(ConcatN);
758 if (n == 3) {
759 push(gen(ConcatStr3, catchBlock, t3, t2, t1));
760 gen(DecRef, t2);
761 gen(DecRef, t1);
763 } else if (n == 4) {
764 SSATmp* t4 = popC();
765 if (!t4->isA(Type::Str)) PUNT(ConcatN);
767 push(gen(ConcatStr4, catchBlock, t4, t3, t2, t1));
768 gen(DecRef, t3);
769 gen(DecRef, t2);
770 gen(DecRef, t1);
772 } else {
773 not_reached();
777 void HhbcTranslator::emitDefCls(int cid, Offset after) {
778 emitInterpOne(0);
781 void HhbcTranslator::emitDefFunc(int fid) {
782 emitInterpOne(0);
785 void HhbcTranslator::emitLateBoundCls() {
786 Class* clss = curClass();
787 if (!clss) {
788 // no static context class, so this will raise an error
789 emitInterpOne(Type::Cls, 0);
790 return;
792 auto const ctx = gen(LdCtx, FuncData(curFunc()), m_irb->fp());
793 push(gen(LdClsCtx, ctx));
796 void HhbcTranslator::emitSelf() {
797 Class* clss = curClass();
798 if (clss == nullptr) {
799 emitInterpOne(Type::Cls, 0);
800 } else {
801 push(cns(clss));
805 void HhbcTranslator::emitParent() {
806 auto const clss = curClass();
807 if (clss == nullptr || clss->parent() == nullptr) {
808 emitInterpOne(Type::Cls, 0);
809 } else {
810 push(cns(clss->parent()));
814 void HhbcTranslator::emitString(int strId) {
815 push(cns(lookupStringId(strId)));
818 void HhbcTranslator::emitInt(int64_t val) {
819 push(cns(val));
822 void HhbcTranslator::emitDouble(double val) {
823 push(cns(val));
826 void HhbcTranslator::emitNullUninit() {
827 push(cns(Type::Uninit));
830 void HhbcTranslator::emitNull() {
831 push(cns(Type::InitNull));
834 void HhbcTranslator::emitTrue() {
835 push(cns(true));
838 void HhbcTranslator::emitFalse() {
839 push(cns(false));
842 void HhbcTranslator::emitDir() {
843 push(cns(curUnit()->dirpath()));
846 void HhbcTranslator::emitFile() {
847 push(cns(curUnit()->filepath()));
850 void HhbcTranslator::emitInitThisLoc(int32_t id) {
851 if (!curClass()) {
852     // Do nothing if $this is null
853 return;
855 auto const ldrefExit = makeExit();
856 auto const tmpThis = gen(LdThis, makeExitSlow(), m_irb->fp());
857 gen(IncRef, tmpThis);
858 auto const oldLoc = ldLoc(id, ldrefExit, DataTypeCountness);
859 genStLocal(id, m_irb->fp(), tmpThis);
860 gen(DecRef, oldLoc);
863 void HhbcTranslator::emitCGetL(int32_t id) {
864 auto ldrefExit = makeExit();
865 auto ldgblExit = makePseudoMainExit();
866 // Mimic hhbc guard relaxation for now.
867 auto cat = curSrcKey().op() == OpFPassL ? DataTypeSpecific
868 : DataTypeCountnessInit;
869 pushIncRef(ldLocInnerWarn(id, ldrefExit, ldgblExit, cat));
872 void HhbcTranslator::emitFPassL(int32_t id) {
873 auto ldrefExit = makeExit();
874 auto ldgblExit = makePseudoMainExit();
875 pushIncRef(ldLocInnerWarn(id, ldrefExit, ldgblExit, DataTypeSpecific));
878 void HhbcTranslator::emitPushL(uint32_t id) {
879 assertTypeLocal(id, Type::InitCell);
880 auto* locVal = ldLoc(id, makeExit(), DataTypeGeneric);
881 push(locVal);
882 genStLocal(id, m_irb->fp(), cns(Type::Uninit));
885 void HhbcTranslator::emitCGetL2(int32_t id) {
886 auto ldrefExit = makeExit();
887 auto ldgblExit = makePseudoMainExit();
888 auto catchBlock = makeCatch();
889 SSATmp* oldTop = pop(Type::StackElem);
890 auto val = ldLocInnerWarn(
892 ldrefExit,
893 ldgblExit,
894 DataTypeCountnessInit,
895 catchBlock
897 pushIncRef(val);
898 push(oldTop);
901 void HhbcTranslator::emitVGetL(int32_t id) {
902 auto value = ldLoc(id, makeExit(), DataTypeCountnessInit);
903 auto const t = value->type();
904 always_assert(t.isBoxed() || t.notBoxed());
906 if (t.notBoxed()) {
907 if (value->isA(Type::Uninit)) {
908 value = cns(Type::InitNull);
910 value = gen(Box, value);
911 genStLocal(id, m_irb->fp(), value);
913 pushIncRef(value);
916 void HhbcTranslator::emitUnsetL(int32_t id) {
917 auto const prev = ldLoc(id, makeExit(), DataTypeCountness);
918 genStLocal(id, m_irb->fp(), cns(Type::Uninit));
919 gen(DecRef, prev);
922 void HhbcTranslator::emitBindL(int32_t id) {
923 if (inPseudoMain()) {
924 emitInterpOne(Type::BoxedCell, 1);
925 return;
928 auto const ldgblExit = makePseudoMainExit();
929 auto const newValue = popV();
930 // Note that the IncRef must happen first, for correctness in a
931 // pseudo-main: the destructor could decref the value again after
932 // we've stored it into the local.
933 pushIncRef(newValue);
934 auto const oldValue = ldLoc(id, ldgblExit, DataTypeSpecific);
935 genStLocal(id, m_irb->fp(), newValue);
936 gen(DecRef, oldValue);
939 void HhbcTranslator::emitSetL(int32_t id) {
940 auto const ldrefExit = makeExit();
941 auto const ldgblExit = makePseudoMainExit();
943 // since we're just storing the value in a local, this function doesn't care
944 // about the type of the value. stLoc needs to IncRef the value so it may
945 // constrain it further.
946 auto const src = popC(DataTypeGeneric);
947 pushStLoc(id, ldrefExit, ldgblExit, src);
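// IncDecL: ++/-- on a local. `pre` pushes the updated value (as in ++$x)
// rather than the original (as in $x++), and `over` selects the
// overflow-checked integer opcodes.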
950 void HhbcTranslator::emitIncDecL(bool pre, bool inc, bool over, uint32_t id) {
951 auto const ldrefExit = makeExit();
952 auto const ldgblExit = makePseudoMainExit();
953 auto const src = ldLocInnerWarn(
955 ldrefExit,
956 ldgblExit,
957 DataTypeSpecific
960 if (src->isA(Type::Bool)) {
961 push(src);
962 return;
965 if (src->type().subtypeOfAny(Type::Arr, Type::Obj)) {
966 pushIncRef(src);
967 return;
970 if (src->isA(Type::Null)) {
971 push(inc && pre ? cns(1) : src);
972 if (inc) {
973 stLoc(id, ldrefExit, ldgblExit, cns(1));
975 return;
978 if (!src->type().subtypeOfAny(Type::Int, Type::Dbl)) {
979 PUNT(IncDecL);
982 auto const res = emitIncDec(pre, inc, over, src);
983 stLoc(id, ldrefExit, ldgblExit, res);
986 // only handles integer or double inc/dec
987 SSATmp* HhbcTranslator::emitIncDec(bool pre, bool inc, bool over, SSATmp* src) {
988 assert(src->isA(Type::Int) || src->isA(Type::Dbl));
990 Opcode op;
992 if (src->isA(Type::Dbl)) {
993 op = inc ? AddDbl : SubDbl;
994 } else if (!over) {
995 op = inc ? AddInt : SubInt;
996 } else {
997 op = inc ? AddIntO : SubIntO;
1000 SSATmp* one = src->isA(Type::Int) ? cns(1) : cns(1.0);
1001 SSATmp* res = nullptr;
1003 if (op == AddIntO || op == SubIntO) {
1004 auto spills = peekSpillValues();
1005 auto const exit = makeExitImpl(
1006 bcOff(),
1007 ExitFlag::Interp,
1008 spills,
1009 CustomExit{}
1011 res = gen(op, exit, src, one);
1012 } else {
1013 res = gen(op, src, one);
1016   // no incref necessary on push since the result is an int or double
1017 push(pre ? res : src);
1018 return res;
1021 #define BINARY_ARITH \
1022 AOP(Add, AddInt, AddDbl) \
1023 AOP(Sub, SubInt, SubDbl) \
1024 AOP(Mul, MulInt, MulDbl) \
1025 AOP(AddO, AddIntO, AddDbl) \
1026 AOP(SubO, SubIntO, SubDbl) \
1027 AOP(MulO, MulIntO, MulDbl) \
1029 #define BINARY_BITOP \
1030 BOP(BitAnd, AndInt) \
1031 BOP(BitOr, OrInt) \
1032 BOP(BitXor, XorInt) \
1034 static bool areBinaryArithTypesSupported(Op op, Type t1, Type t2) {
1035 auto checkArith = [](Type ty) {
1036 return ty.subtypeOfAny(Type::Int, Type::Bool, Type::Dbl);
1038 auto checkBitOp = [](Type ty) {
1039 return ty.subtypeOfAny(Type::Int, Type::Bool);
1042 switch (op) {
1043 #define AOP(OP, OPI, OPD) \
1044 case Op::OP: return checkArith(t1) && checkArith(t2);
1045 BINARY_ARITH
1046 #undef AOP
1047 #define BOP(OP, OPI) \
1048 case Op::OP: return checkBitOp(t1) && checkBitOp(t2);
1049 BINARY_BITOP
1050 #undef BOP
1051 default: not_reached();
1055 Opcode intArithOp(Op op) {
1056 switch (op) {
1057 #define AOP(OP, OPI, OPD) case Op::OP: return OPI;
1058 BINARY_ARITH
1059 #undef AOP
1060 default: not_reached();
1064 Opcode dblArithOp(Op op) {
1065 switch (op) {
1066 #define AOP(OP, OPI, OPD) case Op::OP: return OPD;
1067 BINARY_ARITH
1068 #undef AOP
1069 default: not_reached();
1073 Opcode bitOp(Op op) {
1074 switch (op) {
1075 #define BOP(OP, OPI) case Op::OP: return OPI;
1076 BINARY_BITOP
1077 #undef BOP
1078 default: not_reached();
1082 bool isBitOp(Op op) {
1083 switch (op) {
1084 #define BOP(OP, OPI) case Op::OP: return true;
1085 BINARY_BITOP
1086 #undef BOP
1087 default: return false;
1091 SSATmp* HhbcTranslator::promoteBool(SSATmp* src) {
1092 // booleans in arithmetic and bitwise operations get cast to ints
1093 return src->isA(Type::Bool) ? gen(ConvBoolToInt, src) : src;
1096 Opcode HhbcTranslator::promoteBinaryDoubles(Op op,
1097 SSATmp*& src1,
1098 SSATmp*& src2) {
1099 auto type1 = src1->type();
1100 auto type2 = src2->type();
1102 Opcode opc = intArithOp(op);
1103 if (type1 <= Type::Dbl) {
1104 opc = dblArithOp(op);
1105 if (type2 <= Type::Int) {
1106 src2 = gen(ConvIntToDbl, src2);
1108 } else if (type2 <= Type::Dbl) {
1109 opc = dblArithOp(op);
1110 src1 = gen(ConvIntToDbl, src1);
1112 return opc;
1115 void HhbcTranslator::emitSetOpL(Op subOp, uint32_t id) {
1116 // Needs to modify locals after doing effectful operations like
1117 // ConcatCellCell, so we can't guard on their types.
1118 if (inPseudoMain()) PUNT(SetOpL-PseudoMain);
1120 // Null guard block for globals because we always punt on pseudomains
1121 auto const ldgblExit = nullptr;
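// A common case handled specially below: `$a += $b` where the local and the
// popped value are both arrays. PHP's array `+` is union, which ArrayAdd
// implements directly.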
1124 * Handle array addition first because we don't want to bother with
1125 * boxed locals.
1127 bool isAdd = (subOp == Op::Add || subOp == Op::AddO);
1128 if (isAdd && (m_irb->localType(id, DataTypeSpecific) <= Type::Arr) &&
1129 topC()->isA(Type::Arr)) {
1131 * ArrayAdd decrefs its sources and returns a new array with
1132 * refcount == 1. That covers the local, so incref once more for
1133 * the stack.
1135 auto const catchBlock = makeCatch();
1136 auto const loc = ldLoc(id, ldgblExit, DataTypeSpecific);
1137 auto const val = popC();
1138 auto const result = gen(ArrayAdd, catchBlock, loc, val);
1139 genStLocal(id, m_irb->fp(), result);
1140 pushIncRef(result);
1141 return;
1144 auto const ldrefExit = makeExit();
1145 auto loc = ldLocInnerWarn(id, ldrefExit, ldgblExit, DataTypeGeneric);
1147 if (subOp == Op::Concat) {
1149 * The concat helpers incref their results, which will be consumed by
1150 * the stloc. We need an extra incref for the push onto the stack.
1152 auto const catchBlock = makeCatch();
1153 auto const val = popC();
1154 m_irb->constrainValue(loc, DataTypeSpecific);
1155 auto const result = gen(ConcatCellCell, catchBlock, loc, val);
1157 // Null exit block for 'ldrefExit' because this is a local that we've
1158 // already guarded against in the upper ldLocInnerWarn, and we can't run
1159 // any guards since ConcatCellCell can have effects.
1160 pushIncRef(stLocNRC(id, nullptr, ldgblExit, result));
1162 // ConcatCellCell does not DecRef its second argument,
1163 // so we need to do it here
1164 gen(DecRef, val);
1165 return;
1168 if (areBinaryArithTypesSupported(subOp, loc->type(), topC()->type())) {
1169 auto val = popC();
1170 m_irb->constrainValue(loc, DataTypeSpecific);
1171 loc = promoteBool(loc);
1172 val = promoteBool(val);
1173 Opcode opc;
1174 if (isBitOp(subOp)) {
1175 opc = bitOp(subOp);
1176 } else {
1177 opc = promoteBinaryDoubles(subOp, loc, val);
1180 SSATmp* result = nullptr;
1181 if (opc == AddIntO || opc == SubIntO || opc == MulIntO) {
1182 auto spillValues = peekSpillValues();
1183 spillValues.push_back(val);
1184 auto const exit = makeExitImpl(
1185 bcOff(),
1186 ExitFlag::Interp,
1187 spillValues,
1188 CustomExit{}
1190 result = gen(opc, exit, loc, val);
1191 } else {
1192 result = gen(opc, loc, val);
1194 pushStLoc(id, ldrefExit, ldgblExit, result);
1195 return;
1198 PUNT(SetOpL);
1201 void HhbcTranslator::emitOODeclExists(unsigned char ucsubop) {
1202 auto const subop = static_cast<OODeclExistsOp>(ucsubop);
1203 auto const catchTrace = makeCatch();
1205 auto const tAutoload = popC();
1206 auto const tCls = popC();
1208 assert(tCls->isA(Type::Str)); // result of CastString
1209 assert(tAutoload->isA(Type::Bool)); // result of CastBool
1211 ClassKind kind;
1212 switch (subop) {
1213 case OODeclExistsOp::Class : kind = ClassKind::Class; break;
1214 case OODeclExistsOp::Trait : kind = ClassKind::Trait; break;
1215 case OODeclExistsOp::Interface : kind = ClassKind::Interface; break;
1218 push(gen(OODeclExists, catchTrace, ClassKindData { kind }, tCls, tAutoload));
1219 gen(DecRef, tCls);
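// StaticLocInit: bind local `locId` to the function-static variable named by
// `litStrId`, initializing the static with the popped value if it isn't
// already initialized (PHP `static $x = <expr>;`).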
1222 void HhbcTranslator::emitStaticLocInit(uint32_t locId, uint32_t litStrId) {
1223 if (inPseudoMain()) PUNT(StaticLocInit);
1225 auto const ldgblExit = makePseudoMainExit();
1226 auto const name = lookupStringId(litStrId);
1227 auto const value = popC();
1229 // Closures and generators from closures don't satisfy the "one static per
1230 // source location" rule that the inline fastpath requires
1231 auto const box = [&]{
1232 if (curFunc()->isClosureBody()) {
1233 return gen(ClosureStaticLocInit, cns(name), m_irb->fp(), value);
1236 auto const cachedBox =
1237 gen(LdStaticLocCached, StaticLocName { curFunc(), name });
1238 m_irb->ifThen(
1239 [&] (Block* taken) {
1240 gen(CheckStaticLocInit, taken, cachedBox);
1242 [&] {
1243 m_irb->hint(Block::Hint::Unlikely);
1244 gen(StaticLocInitCached, cachedBox, value);
1247 return cachedBox;
1248 }();
1249 gen(IncRef, box);
1250 auto const oldValue = ldLoc(locId, ldgblExit, DataTypeSpecific);
1251 genStLocal(locId, m_irb->fp(), box);
1252 gen(DecRef, oldValue);
1253 // We don't need to decref value---it's a bytecode invariant that
1254 // our Cell was not ref-counted.
1257 void HhbcTranslator::emitStaticLoc(uint32_t locId, uint32_t litStrId) {
1258 if (inPseudoMain()) PUNT(StaticLoc);
1260 auto const ldgblExit = makePseudoMainExit();
1261 auto const name = lookupStringId(litStrId);
1263 auto const box = curFunc()->isClosureBody() ?
1264 gen(ClosureStaticLocInit, cns(name), m_irb->fp(), cns(Type::Uninit)) :
1265 gen(LdStaticLocCached, StaticLocName { curFunc(), name });
1267 auto const res = m_irb->cond(
1269 [&] (Block* taken) {
1270 gen(CheckStaticLocInit, taken, box);
1272 [&] { // Next: the static local is already initialized
1273 return cns(true);
1275 [&] { // Taken: need to initialize the static local
1277 * Even though this path is "cold", we're not marking it
1278 * unlikely because the size of the instructions this will
1279 * generate is about 10 bytes, which is not much larger than the
1280 * 5 byte jump to acold would be.
1282 * One note about StaticLoc: we're literally always going to
1283 * generate a fallthrough trace here that is cold (the code that
1284 * initializes the static local). TODO(#2894612).
1286 gen(StaticLocInitCached, box, cns(Type::InitNull));
1287 return cns(false);
1289 gen(IncRef, box);
1290 auto const oldValue = ldLoc(locId, ldgblExit, DataTypeGeneric);
1291 genStLocal(locId, m_irb->fp(), box);
1292 gen(DecRef, oldValue);
1293 push(res);
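// Shared prologue for the IterInit family (foreach): pop the iterated value,
// punt unless it is an array or object, run the iterator-init IR op supplied
// by the caller, and branch on its boolean result.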
1296 template<class Lambda>
1297 SSATmp* HhbcTranslator::emitIterInitCommon(int offset, Lambda genFunc,
1298 bool invertCond) {
1299 auto const src = popC();
1300 auto const type = src->type();
1301 if (!type.subtypeOfAny(Type::Arr, Type::Obj)) PUNT(IterInit);
1302 auto const res = genFunc(src);
1303 return emitJmpCondHelper(offset, !invertCond, res);
1306 template<class Lambda>
1307 SSATmp* HhbcTranslator::emitMIterInitCommon(int offset, Lambda genFunc) {
1308 auto exit = makeExit();
1310 SSATmp* src = topV();
1311 Type type = src->type();
1313 assert(type.isBoxed());
1314 m_irb->constrainValue(gen(LdRef, type.innerType(), exit, src),
1315 DataTypeSpecific);
1316 SSATmp* res = genFunc(src);
1317 SSATmp* out = popV();
1318 gen(DecRef, out);
1319 return emitJmpCondHelper(offset, true, res);
1322 void HhbcTranslator::emitIterInit(uint32_t iterId,
1323 int offset,
1324 uint32_t valLocalId,
1325 bool invertCond) {
1326 auto catchBlock = makeCatch();
1327 emitIterInitCommon(offset, [&] (SSATmp* src) {
1328 return gen(IterInit,
1329 Type::Bool,
1330 catchBlock,
1331 IterData(iterId, -1, valLocalId),
1332 src,
1333 m_irb->fp());
1335 invertCond);
1338 void HhbcTranslator::emitIterInitK(uint32_t iterId,
1339 int offset,
1340 uint32_t valLocalId,
1341 uint32_t keyLocalId,
1342 bool invertCond) {
1343 auto catchBlock = makeCatch();
1344 emitIterInitCommon(offset, [&] (SSATmp* src) {
1345 return gen(IterInitK,
1346 Type::Bool,
1347 catchBlock,
1348 IterData(iterId, keyLocalId, valLocalId),
1349 src,
1350 m_irb->fp());
1352 invertCond);
1355 void HhbcTranslator::emitIterNext(uint32_t iterId,
1356 int offset,
1357 uint32_t valLocalId,
1358 bool invertCond) {
1359 SSATmp* res = gen(
1360 IterNext,
1361 Type::Bool,
1362 makeCatch(),
1363 IterData(iterId, -1, valLocalId),
1364 m_irb->fp()
1366 emitJmpCondHelper(offset, invertCond, res);
1369 void HhbcTranslator::emitIterNextK(uint32_t iterId,
1370 int offset,
1371 uint32_t valLocalId,
1372 uint32_t keyLocalId,
1373 bool invertCond) {
1374 SSATmp* res = gen(
1375 IterNextK,
1376 Type::Bool,
1377 makeCatch(),
1378 IterData(iterId, keyLocalId, valLocalId),
1379 m_irb->fp()
1381 emitJmpCondHelper(offset, invertCond, res);
1384 void HhbcTranslator::emitWIterInit(uint32_t iterId,
1385 int offset,
1386 uint32_t valLocalId,
1387 bool invertCond) {
1388 auto catchBlock = makeCatch();
1389 emitIterInitCommon(
1390 offset, [&] (SSATmp* src) {
1391 return gen(WIterInit,
1392 Type::Bool,
1393 catchBlock,
1394 IterData(iterId, -1, valLocalId),
1395 src,
1396 m_irb->fp());
1398 invertCond);
1401 void HhbcTranslator::emitWIterInitK(uint32_t iterId,
1402 int offset,
1403 uint32_t valLocalId,
1404 uint32_t keyLocalId,
1405 bool invertCond) {
1406 auto catchBlock = makeCatch();
1407 emitIterInitCommon(
1408 offset, [&] (SSATmp* src) {
1409 return gen(WIterInitK,
1410 Type::Bool,
1411 catchBlock,
1412 IterData(iterId, keyLocalId, valLocalId),
1413 src,
1414 m_irb->fp());
1416 invertCond);
1419 void HhbcTranslator::emitWIterNext(uint32_t iterId,
1420 int offset,
1421 uint32_t valLocalId,
1422 bool invertCond) {
1423 SSATmp* res = gen(
1424 WIterNext,
1425 Type::Bool,
1426 makeCatch(),
1427 IterData(iterId, -1, valLocalId),
1428 m_irb->fp()
1430 emitJmpCondHelper(offset, invertCond, res);
1433 void HhbcTranslator::emitWIterNextK(uint32_t iterId,
1434 int offset,
1435 uint32_t valLocalId,
1436 uint32_t keyLocalId,
1437 bool invertCond) {
1438 SSATmp* res = gen(
1439 WIterNextK,
1440 Type::Bool,
1441 makeCatch(),
1442 IterData(iterId, keyLocalId, valLocalId),
1443 m_irb->fp()
1445 emitJmpCondHelper(offset, invertCond, res);
1448 void HhbcTranslator::emitMIterInit(uint32_t iterId,
1449 int offset,
1450 uint32_t valLocalId) {
1451 auto catchBlock = makeCatch();
1452 emitMIterInitCommon(offset, [&] (SSATmp* src) {
1453 return gen(
1454 MIterInit,
1455 Type::Bool,
1456 catchBlock,
1457 IterData(iterId, -1, valLocalId),
1458 src,
1459 m_irb->fp()
1464 void HhbcTranslator::emitMIterInitK(uint32_t iterId,
1465 int offset,
1466 uint32_t valLocalId,
1467 uint32_t keyLocalId) {
1468 auto catchBlock = makeCatch();
1469 emitMIterInitCommon(offset, [&] (SSATmp* src) {
1470 return gen(
1471 MIterInitK,
1472 Type::Bool,
1473 catchBlock,
1474 IterData(iterId, keyLocalId, valLocalId),
1475 src,
1476 m_irb->fp()
1481 void HhbcTranslator::emitMIterNext(uint32_t iterId,
1482 int offset,
1483 uint32_t valLocalId) {
1484 SSATmp* res = gen(
1485 MIterNext,
1486 Type::Bool,
1487 IterData(iterId, -1, valLocalId),
1488 m_irb->fp()
1490 emitJmpCondHelper(offset, false, res);
1493 void HhbcTranslator::emitMIterNextK(uint32_t iterId,
1494 int offset,
1495 uint32_t valLocalId,
1496 uint32_t keyLocalId) {
1497 SSATmp* res = gen(
1498 MIterNextK,
1499 Type::Bool,
1500 IterData(iterId, keyLocalId, valLocalId),
1501 m_irb->fp()
1503 emitJmpCondHelper(offset, false, res);
1506 void HhbcTranslator::emitIterFree(uint32_t iterId) {
1507 gen(IterFree, IterId(iterId), m_irb->fp());
1510 void HhbcTranslator::emitMIterFree(uint32_t iterId) {
1511 gen(MIterFree, IterId(iterId), m_irb->fp());
1514 void HhbcTranslator::emitDecodeCufIter(uint32_t iterId, int offset) {
1515 auto catchBlock = makeCatch();
1516 SSATmp* src = popC();
1517 Type type = src->type();
1518 if (type.subtypeOfAny(Type::Arr, Type::Str, Type::Obj)) {
1519 SSATmp* res = gen(DecodeCufIter, Type::Bool,
1520 IterId(iterId), catchBlock, src, m_irb->fp());
1521 gen(DecRef, src);
1522 emitJmpCondHelper(offset, true, res);
1523 } else {
1524 gen(DecRef, src);
1525 emitJmp(offset, true, nullptr);
1529 void HhbcTranslator::emitCIterFree(uint32_t iterId) {
1530 gen(CIterFree, IterId(iterId), m_irb->fp());
1533 void HhbcTranslator::emitIterBreak(const ImmVector& iv,
1534 uint32_t offset,
1535 bool breakTracelet) {
1536 int iterIndex;
1537 for (iterIndex = 0; iterIndex < iv.size(); iterIndex += 2) {
1538 IterKind iterKind = (IterKind)iv.vec32()[iterIndex];
1539 Id iterId = iv.vec32()[iterIndex + 1];
1540 switch (iterKind) {
1541 case KindOfIter: gen(IterFree, IterId(iterId), m_irb->fp()); break;
1542 case KindOfMIter: gen(MIterFree, IterId(iterId), m_irb->fp()); break;
1543 case KindOfCIter: gen(CIterFree, IterId(iterId), m_irb->fp()); break;
1547 if (!breakTracelet) return;
1548 gen(Jmp, makeExit(offset));
1551 void HhbcTranslator::emitCreateCont(Offset resumeOffset) {
1552 assert(!resumed());
1553 assert(curFunc()->isNonAsyncGenerator());
1555 auto const ldgblExit = makePseudoMainExit();
1557 if (curFunc()->attrs() & AttrMayUseVV) {
1558 gen(ExitOnVarEnv, makeExitSlow(), m_irb->fp());
1561 // Create the Generator object.
1562 auto const func = curFunc();
1563 auto const resumeSk = SrcKey(func, resumeOffset, true);
1564 auto const resumeAddr = gen(LdBindAddr, LdBindAddrData(resumeSk));
1565 auto const cont = gen(CreateCont, m_irb->fp(), cns(func->numSlotsInFrame()),
1566 resumeAddr, cns(resumeOffset));
1568 // Teleport local variables into the generator.
1569 SSATmp* contAR = gen(LdContActRec, Type::PtrToGen, cont);
1570 for (int i = 0; i < func->numLocals(); ++i) {
1571 auto const loc = ldLoc(i, ldgblExit, DataTypeGeneric);
1572 gen(StMem, contAR, cns(-cellsToBytes(i + 1)), loc);
1575 // Call the FunctionSuspend hook and put the return value on the stack so that
1576 // the unwinder would decref it.
1577 emitRetSurpriseCheck(contAR, nullptr, makeCatch({cont}), false);
1579 // Grab caller info from ActRec, free ActRec, store the return value
1580 // and return control to the caller.
1581 gen(StRetVal, m_irb->fp(), cont);
1582 SSATmp* retAddr = gen(LdRetAddr, m_irb->fp());
1583 SSATmp* sp = gen(RetAdjustStack, m_irb->fp());
1584 SSATmp* fp = gen(FreeActRec, m_irb->fp());
1585 gen(RetCtrl, RetCtrlData(false), sp, fp, retAddr);
1587 // Flag that this trace has a Ret instruction, so that no ExitTrace is needed
1588 m_hasExit = true;
1591 void HhbcTranslator::emitContEnter(Offset returnOffset) {
1592 assert(curClass());
1593 assert(curClass()->classof(c_Generator::classof()));
1594 assert(curFunc()->contains(returnOffset));
1596 // Load generator's FP and resume address.
1597 auto genObj = gen(LdThis, m_irb->fp());
1598 auto genFp = gen(LdContActRec, Type::FramePtr, genObj);
1599 auto resumeAddr =
1600 gen(LdContArRaw, RawMemData{RawMemData::ContResumeAddr}, genFp);
1602 // Make sure function enter hook is called if needed.
1603 auto exitSlow = makeExitSlow();
1604 gen(CheckSurpriseFlags, exitSlow);
1606 // Exit to interpreter if resume address is not known.
1607 resumeAddr = gen(CheckNonNull, exitSlow, resumeAddr);
1609 // Sync stack.
1610 auto const sp = spillStack();
1612 // Enter generator.
1613 auto returnBcOffset = returnOffset - curFunc()->base();
1614 gen(ContEnter, sp, m_irb->fp(), genFp, resumeAddr, cns(returnBcOffset));
1616 // The top of the stack was consumed by the generator.
1617 popC(DataTypeGeneric);
1620 void HhbcTranslator::emitResumedReturnControl(Block* catchBlock) {
1621 auto const sp = spillStack();
1622 emitRetSurpriseCheck(m_irb->fp(), nullptr, catchBlock, true);
1624 auto const retAddr = gen(LdRetAddr, m_irb->fp());
1625 auto const fp = gen(FreeActRec, m_irb->fp());
1627 gen(RetCtrl, RetCtrlData(true), sp, fp, retAddr);
1628 m_hasExit = true;
1631 void HhbcTranslator::emitYieldImpl(Offset resumeOffset) {
1632 // Resumable::setResumeAddr(resumeAddr, resumeOffset)
1633 auto const resumeSk = SrcKey(curFunc(), resumeOffset, true);
1634 auto const resumeAddr = gen(LdBindAddr, LdBindAddrData(resumeSk));
1635 gen(StContArRaw, RawMemData{RawMemData::ContResumeAddr}, m_irb->fp(),
1636 resumeAddr);
1637 gen(StContArRaw, RawMemData{RawMemData::ContResumeOffset}, m_irb->fp(),
1638 cns(resumeOffset));
1640 // Set yielded value.
1641 auto const oldValue = gen(LdContArValue, Type::Cell, m_irb->fp());
1642 gen(StContArValue, m_irb->fp(), popC(DataTypeGeneric)); // teleporting value
1643 gen(DecRef, oldValue);
1645 // Set state from Running to Started.
1646 gen(StContArRaw, RawMemData{RawMemData::ContState}, m_irb->fp(),
1647 cns(c_Generator::Started));
1650 void HhbcTranslator::emitYield(Offset resumeOffset) {
1651 auto catchBlock = makeCatchNoSpill();
1653 emitYieldImpl(resumeOffset);
1655   // Take a fast path if this generator never yields key => value pairs.
1656 if (curFunc()->isPairGenerator()) {
1657 // this needs optimization
1658 auto const idx =
1659 gen(LdContArRaw, RawMemData{RawMemData::ContIndex}, m_irb->fp());
1660 auto const newIdx = gen(AddInt, idx, cns(1));
1661 gen(StContArRaw, RawMemData{RawMemData::ContIndex}, m_irb->fp(), newIdx);
1663 auto const oldKey = gen(LdContArKey, Type::Cell, m_irb->fp());
1664 gen(StContArKey, m_irb->fp(), newIdx);
1665 gen(DecRef, oldKey);
1666 } else {
1667 // we're guaranteed that the key is an int
1668 gen(ContArIncKey, m_irb->fp());
1671 // transfer control
1672 emitResumedReturnControl(catchBlock);
1675 void HhbcTranslator::emitYieldK(Offset resumeOffset) {
1676 auto catchBlock = makeCatchNoSpill();
1677 emitYieldImpl(resumeOffset);
1679 auto const newKey = popC();
1680 auto const oldKey = gen(LdContArKey, Type::Cell, m_irb->fp());
1681 gen(StContArKey, m_irb->fp(), newKey);
1682 gen(DecRef, oldKey);
1684 auto const keyType = newKey->type();
1685 if (keyType <= Type::Int) {
1686 gen(ContArUpdateIdx, m_irb->fp(), newKey);
1689 // transfer control
1690 emitResumedReturnControl(catchBlock);
1693 void HhbcTranslator::emitContCheck(bool checkStarted) {
1694 assert(curClass());
1695 SSATmp* cont = gen(LdThis, m_irb->fp());
1696 gen(ContPreNext, makeExitSlow(), cont, cns(checkStarted));
1699 void HhbcTranslator::emitContValid() {
1700 assert(curClass());
1701 SSATmp* cont = gen(LdThis, m_irb->fp());
1702 push(gen(ContValid, cont));
1705 void HhbcTranslator::emitContKey() {
1706 assert(curClass());
1707 SSATmp* cont = gen(LdThis, m_irb->fp());
1708 gen(ContStartedCheck, makeExitSlow(), cont);
1709 SSATmp* offset = cns(CONTOFF(m_key));
1710 SSATmp* value = gen(LdProp, Type::Cell, cont, offset);
1711 pushIncRef(value);
1714 void HhbcTranslator::emitContCurrent() {
1715 assert(curClass());
1716 SSATmp* cont = gen(LdThis, m_irb->fp());
1717 gen(ContStartedCheck, makeExitSlow(), cont);
1718 SSATmp* offset = cns(CONTOFF(m_value));
1719 SSATmp* value = gen(LdProp, Type::Cell, cont, offset);
1720 pushIncRef(value);
1723 void HhbcTranslator::emitAwaitE(SSATmp* child, Block* catchBlock,
1724 Offset resumeOffset, int numIters) {
1725 assert(curFunc()->isAsync());
1726 assert(!resumed());
1727 assert(child->isA(Type::Obj));
1728 auto const kMaxCellStores = 3;
1730 // Create the AsyncFunctionWaitHandle object.
1731 auto const func = curFunc();
1732 auto const resumeSk = SrcKey(func, resumeOffset, true);
1733 auto const resumeAddr = gen(LdBindAddr, LdBindAddrData(resumeSk));
1734 auto const waitHandle =
1735 gen(CreateAFWH, catchBlock, m_irb->fp(), cns(func->numSlotsInFrame()),
1736 resumeAddr, cns(resumeOffset),
1737 child);
1739 // Teleport local variables into the AsyncFunctionWaitHandle.
1740 SSATmp* asyncAR = gen(LdAFWHActRec, Type::PtrToGen, waitHandle);
1742 static_assert(sizeof(Iter) % sizeof(TypedValue) == 0, "Iter size changed");
1743 auto const numCells = func->numLocals() +
1744 numIters * sizeof(Iter) / sizeof(TypedValue);
1745 if (numIters == 0 && func->numLocals() <= kMaxCellStores) {
1746 for (int i = 0; i < func->numLocals(); ++i) {
1747 auto const loc = ldLoc(i, nullptr, DataTypeGeneric);
1748 gen(StCell, LocalOffset(localOffset(i)), asyncAR, loc);
1750 } else {
1751 gen(CopyAsyncCells, LocalId(numCells), m_irb->fp(), asyncAR);
1754 // Call the FunctionSuspend hook and put the AsyncFunctionWaitHandle
1755 // on the stack so that the unwinder would decref it.
1756 push(waitHandle);
1757 emitRetSurpriseCheck(asyncAR, nullptr, makeCatch(), false);
1758 discard(1);
1760 // Grab caller info from ActRec, free ActRec, store the return value
1761 // and return control to the caller.
1762 gen(StRetVal, m_irb->fp(), waitHandle);
1763 SSATmp* retAddr = gen(LdRetAddr, m_irb->fp());
1764 SSATmp* sp = gen(RetAdjustStack, m_irb->fp());
1765 SSATmp* fp = gen(FreeActRec, m_irb->fp());
1766 gen(RetCtrl, RetCtrlData(false), sp, fp, retAddr);
1769 void HhbcTranslator::emitAwaitR(SSATmp* child, Block* catchBlock,
1770 Block* catchBlockNoSpill, Offset resumeOffset) {
1771 assert(curFunc()->isAsync());
1772 assert(resumed());
1773 assert(child->isA(Type::Obj));
1775 // Prepare child for establishing dependency.
1776 gen(AFWHPrepareChild, catchBlock, m_irb->fp(), child);
1778 // Suspend the async function.
1779 auto const resumeSk = SrcKey(curFunc(), resumeOffset, true);
1780 auto const resumeAddr = gen(LdBindAddr, LdBindAddrData(resumeSk));
1781 gen(StAsyncArRaw, RawMemData{RawMemData::AsyncResumeAddr}, m_irb->fp(),
1782 resumeAddr);
1783 gen(StAsyncArRaw, RawMemData{RawMemData::AsyncResumeOffset}, m_irb->fp(),
1784 cns(resumeOffset));
1786 // Set up the dependency.
1787 gen(AFWHBlockOn, m_irb->fp(), child);
1789 // Transfer control back to the scheduler.
1790 auto const sp = spillStack();
1791 emitRetSurpriseCheck(m_irb->fp(), nullptr, catchBlockNoSpill, true);
1793 auto const retAddr = gen(LdRetAddr, m_irb->fp());
1794 auto const fp = gen(FreeActRec, m_irb->fp());
1796 gen(RetCtrl, RetCtrlData(true), sp, fp, retAddr);
1799 void HhbcTranslator::emitAwait(Offset resumeOffset, int numIters) {
1800 assert(curFunc()->isAsync());
1802 auto const catchBlock = makeCatch();
1803 auto const catchBlockNoSpill = resumed() ? makeCatchNoSpill() : nullptr;
1804 auto const exitSlow = makeExitSlow();
1806 if (!topC()->isA(Type::Obj)) PUNT(Await-NonObject);
1808 auto const child = popC();
1809 gen(JmpZero, exitSlow, gen(IsWaitHandle, child));
1810 if ((curFunc()->attrs() & AttrMayUseVV) && !resumed()) {
1811 gen(ExitOnVarEnv, exitSlow, m_irb->fp());
1814 // cns() would ODR-use these
1815 auto const kSucceeded = c_WaitHandle::STATE_SUCCEEDED;
1816 auto const kFailed = c_WaitHandle::STATE_FAILED;
1818 auto const state = gen(LdWHState, child);
1819 gen(JmpEq, exitSlow, state, cns(kFailed));
1821 m_irb->ifThenElse(
1822 [&] (Block* taken) {
1823 gen(JmpEq, taken, state, cns(kSucceeded));
1825 [&] { // Next: the wait handle is not finished, we need to suspend
1826 if (resumed()) {
1827 emitAwaitR(child, catchBlock, catchBlockNoSpill, resumeOffset);
1828 } else {
1829 emitAwaitE(child, catchBlock, resumeOffset, numIters);
1832 [&] { // Taken: retrieve the result from the wait handle
1833 auto const res = gen(LdWHResult, child);
1834 gen(IncRef, res);
1835 gen(DecRef, child);
1836 push(res);
1841 void HhbcTranslator::emitStrlen() {
1842 Type inType = topC()->type();
1844 if (inType <= Type::Str) {
1845 SSATmp* input = popC();
1846 if (input->isConst()) {
1847 // static string; fold its strlen operation
1848 push(cns(input->strVal()->size()));
1849 } else {
1850 push(gen(LdRaw, RawMemData{RawMemData::StrLen}, input));
1851 gen(DecRef, input);
1853 } else if (inType <= Type::Null) {
1854 popC();
1855 push(cns(0));
1856 } else if (inType <= Type::Bool) {
1857 // strlen(true) == 1, strlen(false) == 0.
1858 push(gen(ConvBoolToInt, popC()));
1859 } else {
1860 emitInterpOne(Type::Int | Type::InitNull, 1);
1864 void HhbcTranslator::emitIncStat(int32_t counter, int32_t value, bool force) {
1865 if (Stats::enabled() || force) {
1866 gen(IncStat, cns(counter), cns(value), cns(force));
1870 void HhbcTranslator::emitIdx() {
1871 Type keyType = topC(1, DataTypeGeneric)->type();
1872 SSATmp* base = topC(2, DataTypeGeneric);
1873 Type baseType = base->type();
1875 if (baseType <= Type::Arr &&
1876 (keyType <= Type::Int || keyType <= Type::Str)) {
1877 emitArrayIdx();
1878 } else {
1879 emitIdxCommon(GenericIdx, makeCatch());
1883 // NOTE: #3233688 talks about making an idx fast path for collections;
1884 // that is where this function will be used and will make more sense. It's only
1885 // called once now.
1886 void HhbcTranslator::emitIdxCommon(Opcode opc, Block* catchBlock) {
1887 SSATmp* def = popC(DataTypeSpecific);
1888 SSATmp* key = popC(DataTypeSpecific);
1889 SSATmp* arr = popC(DataTypeSpecific);
1890 push(gen(opc, catchBlock, arr, key, def));
1891 gen(DecRef, arr);
1892 gen(DecRef, key);
1893 gen(DecRef, def);
1896 void HhbcTranslator::emitArrayIdx() {
1897 // These types are just used to decide what to do; once we know what we're
1898 // actually doing we constrain the values with the popC()s later on in this
1899 // function.
1900 Type keyType = topC(1, DataTypeGeneric)->type();
1901 Type arrType = topC(2, DataTypeGeneric)->type();
1903 if (!(arrType <= Type::Arr)) {
1904 // raise fatal
1905 emitInterpOne(Type::Cell, 3);
1906 return;
1909 if (keyType <= Type::Null) {
1910 SSATmp* def = popC(DataTypeGeneric); // def is just pushed back on the stack
1911 SSATmp* key = popC();
1912 SSATmp* arr = popC();
1914 // if the key is null it will not be found so just return the default
1915 push(def);
1916 gen(DecRef, arr);
1917 gen(DecRef, key);
1918 return;
1920 if (!(keyType <= Type::Int || keyType <= Type::Str)) {
1921 emitInterpOne(Type::Cell, 3);
1922 return;
1925 SSATmp* def = popC(DataTypeGeneric); // a helper will decref it but the
1926 // translated code doesn't care about
1927 // the type
1928 SSATmp* key = popC();
1929 SSATmp* arr = popC();
1931 KeyType arrayKeyType;
1932 bool checkForInt;
1933 checkStrictlyInteger(key, arrayKeyType, checkForInt);
1935 TCA opFunc;
1936 if (checkForInt) {
1937 opFunc = (TCA)&arrayIdxSi;
1938 } else if (KeyType::Int == arrayKeyType) {
1939 opFunc = (TCA)&arrayIdxI;
1940 } else {
1941 assert(KeyType::Str == arrayKeyType);
1942 opFunc = (TCA)&arrayIdxS;
1945 push(gen(ArrayIdx, cns(opFunc), arr, key, def));
1946 gen(DecRef, arr);
1947 gen(DecRef, key);
1948 gen(DecRef, def);
1951 void HhbcTranslator::emitIncTransCounter() {
1952 m_irb->gen(IncTransCounter);
1955 void HhbcTranslator::emitIncProfCounter(TransID transId) {
1956 m_irb->gen(IncProfCounter, TransIDData(transId));
1959 void HhbcTranslator::emitCheckCold(TransID transId) {
1960 m_irb->gen(CheckCold, makeExitOpt(transId), TransIDData(transId));
1963 void HhbcTranslator::emitMInstr(const NormalizedInstruction& ni) {
1964 if (inPseudoMain()) {
1965 emitInterpOne(ni);
1966 return;
1968 MInstrTranslator(ni, *this).emit();
1972 * IssetL: return true if var is not uninit and !is_null(var)
1973 * Unboxes var if necessary when var is not uninit.
1975 void HhbcTranslator::emitIssetL(int32_t id) {
1976 auto const ldrefExit = makeExit();
1977 auto const ldgblExit = makePseudoMainExit();
1978 auto const ld = ldLocInner(id, ldrefExit, ldgblExit, DataTypeSpecific);
1979 push(gen(IsNType, Type::Null, ld));
1982 void HhbcTranslator::emitEmptyL(int32_t id) {
1983 auto const ldrefExit = makeExit();
1984 auto const ldgblExit = makePseudoMainExit();
1985 auto const ld = ldLocInner(id, ldrefExit, ldgblExit, DataTypeSpecific);
1986 push(gen(XorBool, gen(ConvCellToBool, ld), cns(true)));
1989 void HhbcTranslator::emitIsTypeC(DataType t) {
1990 SSATmp* src = popC(DataTypeSpecific);
1991 push(gen(IsType, Type(t), src));
1992 gen(DecRef, src);
1995 void HhbcTranslator::emitIsTypeL(uint32_t id, DataType t) {
1996 auto const ldrefExit = makeExit();
1997 auto const ldgblExit = makePseudoMainExit();
1998 auto const val =
1999 ldLocInnerWarn(id, ldrefExit, ldgblExit, DataTypeSpecific);
2000 push(gen(IsType, Type(t), val));
2003 void HhbcTranslator::emitIsScalarL(int id) {
2004 auto const ldrefExit = makeExit();
2005 auto const ldgblExit = makePseudoMainExit();
2006 SSATmp* src = ldLocInner(id, ldrefExit, ldgblExit, DataTypeSpecific);
2007 push(gen(IsScalarType, src));
2010 void HhbcTranslator::emitIsScalarC() {
2011 SSATmp* src = popC();
2012 push(gen(IsScalarType, src));
2013 gen(DecRef, src);
2016 void HhbcTranslator::emitPopA() { popA(); }
2018 void HhbcTranslator::emitPopC() {
2019 popDecRef(Type::Cell, DataTypeGeneric);
2022 void HhbcTranslator::emitPopV() {
2023 popDecRef(Type::BoxedCell, DataTypeGeneric);
2026 void HhbcTranslator::emitPopR() {
2027 popDecRef(Type::Gen, DataTypeGeneric);
2030 void HhbcTranslator::emitDup() {
2031 pushIncRef(topC());
2034 void HhbcTranslator::emitJmp(int32_t offset,
2035 bool breakTracelet,
2036 Block* catchBlock) {
2037 // If surprise flags are set, exit trace and handle surprise
2038 bool backward = static_cast<uint32_t>(offset) <= bcOff();
2039 if (backward && catchBlock) {
2040 emitJmpSurpriseCheck(catchBlock);
2042 if (genMode() == IRGenMode::CFG) {
2043 // TODO(t3730057): Optimize away spillstacks and fallthrough
2044 // jumps, either by doing something clever here or adding to
2045 // jumpopts.
2046 exceptionBarrier();
2047 auto target = (breakTracelet
2048 || m_irb->blockIsIncompatible(offset))
2049 ? makeExit(offset)
2050 : makeBlock(offset);
2051 assert(target != nullptr);
2052 gen(Jmp, target);
2053 return;
2055 if (!breakTracelet) return;
2056 gen(Jmp, makeExit(offset));
2059 SSATmp* HhbcTranslator::emitJmpCondHelper(int32_t offset,
2060 bool negate,
2061 SSATmp* src) {
2062 spillStack();
2064 auto const target = makeExit(offset);
2065 auto const boolSrc = gen(ConvCellToBool, src);
2066 gen(DecRef, src);
2067 return gen(negate ? JmpZero : JmpNZero, target, boolSrc);
2070 void HhbcTranslator::emitJmpHelper(int32_t taken,
2071 int32_t next,
2072 bool negate,
2073 bool bothPaths,
2074 bool breaksTracelet,
2075 SSATmp* src) {
2076 if (breaksTracelet) {
2077 spillStack();
2079 if (genMode() == IRGenMode::CFG) {
2080 // Before jumping to a merge point we have to ensure that the
2081 // stack pointer is sync'ed. Without an ExceptionBarrier the
2082 // SpillStack can be removed by DCE (especially since merge points
2083 // start with a DefSP to block SP-chain walking).
2084 exceptionBarrier();
2086 auto const target = (!bothPaths
2087 || m_irb->blockIsIncompatible(taken))
2088 ? makeExit(taken)
2089 : makeBlock(taken);
2090 assert(target != nullptr);
2091 auto const boolSrc = gen(ConvCellToBool, src);
2092 gen(DecRef, src);
2093 gen(negate ? JmpZero : JmpNZero, target, boolSrc);
2095 // TODO(t3730079): This block is probably redundant with the
2096 // fallthrough logic in translateRegion. Try removing the guards
2097 // against conditional jumps there as well as this.
2098 if (genMode() == IRGenMode::CFG && m_irb->blockIsIncompatible(next)) {
2099 gen(Jmp, makeExit(next));
2103 void HhbcTranslator::emitJmpZ(Offset taken, Offset next, bool bothPaths,
2104 bool breaksTracelet) {
2105 auto const src = popC();
2106 emitJmpHelper(taken, next, true, bothPaths, breaksTracelet, src);
2109 void HhbcTranslator::emitJmpNZ(Offset taken, Offset next, bool bothPaths,
2110 bool breaksTracelet) {
2111 auto const src = popC();
2112 emitJmpHelper(taken, next, false, bothPaths, breaksTracelet, src);
2116 * True if comparison may throw or reenter.
2118 * 1. Objects compared with strings may involve calling a user-defined
2119 * __toString function.
2120 * 2. Array comparisons can throw if recursion is detected.
2122 bool cmpOpTypesMayReenter(Type t0, Type t1) {
2123 assert(!t0.equals(Type::Gen) && !t1.equals(Type::Gen));
2124 return (t0.maybe(Type::Obj) && t1.maybe(Type::Str)) ||
2125 (t0.maybe(Type::Str) && t1.maybe(Type::Obj)) ||
2126 (t0.maybe(Type::Obj) && t1.maybe(Type::Obj)) ||
2127 (t0.maybe(Type::Arr) && t1.maybe(Type::Arr));
2130 Opcode matchReentrantCmp(Opcode opc) {
2131 switch (opc) {
2132 case Gt: return GtX;
2133 case Gte: return GteX;
2134 case Lt: return LtX;
2135 case Lte: return LteX;
2136 case Eq: return EqX;
2137 case Neq: return NeqX;
2138 default: return opc;
2142 void HhbcTranslator::emitCmp(Opcode opc) {
2143 Block* catchBlock = nullptr;
2144 Opcode opc2 = matchReentrantCmp(opc);
2145 // if the comparison operator could re-enter, convert it to the re-entrant
2146 // form and add the required catch block.
2147 // TODO #3446092 un-overload these opcodes.
2148 if (cmpOpTypesMayReenter(topC(0)->type(), topC(1)->type()) && opc2 != opc) {
2149 catchBlock = makeCatch();
2150 opc = opc2;
2152 // src2 opc src1
2153 SSATmp* src1 = popC();
2154 SSATmp* src2 = popC();
2155 push(gen(opc, catchBlock, src2, src1));
2156 gen(DecRef, src2);
2157 gen(DecRef, src1);
2160 // Return a constant SSATmp representing a static value held in a
2161 // TypedValue. The TypedValue may be a non-scalar, but it must have a
2162 // static value.
2163 SSATmp* HhbcTranslator::staticTVCns(const TypedValue* tv) {
2164 switch (tv->m_type) {
2165 case KindOfNull: return cns(Type::InitNull);
2166 case KindOfBoolean: return cns(!!tv->m_data.num);
2167 case KindOfInt64: return cns(tv->m_data.num);
2168 case KindOfString:
2169 case KindOfStaticString: return cns(tv->m_data.pstr);
2170 case KindOfDouble: return cns(tv->m_data.dbl);
2171 case KindOfArray: return cns(tv->m_data.parr);
2172 default: always_assert(0);
2176 void HhbcTranslator::emitClsCnsD(int32_t cnsNameId, int32_t clsNameId,
2177 Type outPred) {
2178 auto const clsNameStr = lookupStringId(clsNameId);
2179 auto const cnsNameStr = lookupStringId(cnsNameId);
2180 auto const clsCnsName = ClsCnsName { clsNameStr, cnsNameStr };
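// e.g. a literal Foo::BAR reference, where both the class name and the
// constant name are known at compile time.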
2182 // If we have to side exit, do the RDS lookup before chaining to
2183 // another Tracelet so forward progress still happens.
2184 auto catchBlock = makeCatchNoSpill();
2185 auto const sideExit = makeSideExit(
2186 nextBcOff(),
2187 [&] {
2188 return gen(LookupClsCns, catchBlock, clsCnsName);
2193 * If the class is already defined in this request, and this
2194 * constant is a scalar constant, we can just compile it to a
2195 * literal.
2197 * We need to guard at runtime that the class is defined in this
2198 * request and has the Class* we expect. If the class is persistent
2199 * or a parent of the current context, we don't need the guard.
2201 if (auto const cls = Unit::lookupClass(clsNameStr)) {
2202 Slot ignore;
2203 auto const tv = cls->cnsNameToTV(cnsNameStr, ignore);
2204 if (tv && tv->m_type != KindOfUninit) {
2205 if (!classIsPersistentOrCtxParent(cls)) {
2206 gen(CheckDefinedClsEq, CheckDefinedClsData{clsNameStr, cls}, sideExit);
2208 push(staticTVCns(tv));
2209 return;
2213 auto guardType = Type::UncountedInit;
2214 if (outPred.strictSubtypeOf(guardType)) guardType = outPred;
2215 auto const cns = gen(LdClsCns, sideExit, clsCnsName, guardType);
2216 push(cns);
2219 void HhbcTranslator::emitAKExists() {
2220 SSATmp* arr = popC();
2221 SSATmp* key = popC();
2223 if (!arr->isA(Type::Arr) && !arr->isA(Type::Obj)) {
2224 PUNT(AKExists_badArray);
2226 if (!key->isA(Type::Str) && !key->isA(Type::Int) && !key->isA(Type::Null)) {
2227 PUNT(AKExists_badKey);
2230 push(gen(AKExists, arr, key));
2231 gen(DecRef, arr);
2232 gen(DecRef, key);
2235 void HhbcTranslator::emitFPassR() {
2236 emitUnboxRAux();
2239 void HhbcTranslator::emitFPassCOp() {
2242 void HhbcTranslator::emitFPassV() {
2243 Block* exit = makeExit();
2244 SSATmp* tmp = popV();
2245 pushIncRef(gen(LdRef, exit, tmp->type().innerType(), tmp));
2246 gen(DecRef, tmp);
2249 void HhbcTranslator::emitFPushCufIter(int32_t numParams,
2250 int32_t itId) {
2251 auto sp = spillStack();
2252 m_fpiStack.emplace(sp, m_irb->spOffset());
2253 gen(CufIterSpillFrame,
2254 FPushCufData(numParams, itId),
2255 sp, m_irb->fp());
2258 static const Func* findCuf(Op op,
2259 SSATmp* callable,
2260 Class* ctx,
2261 Class*& cls,
2262 StringData*& invName,
2263 bool& forward) {
2264 cls = nullptr;
2265 invName = nullptr;
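// A sketch of the constant callables handled here: a plain function name
// like "foo", a "Cls::meth" string, or an array('Cls', 'meth') of two
// strings; "self"/"parent" resolve against ctx, "static" bails, and any other
// class name must resolve to a unique class or we return nullptr (the caller
// then falls back to the generic path).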
2267 const StringData* str =
2268 callable->isA(Type::Str) && callable->isConst() ? callable->strVal()
2269 : nullptr;
2270 const ArrayData* arr =
2271 callable->isA(Type::Arr) && callable->isConst() ? callable->arrVal()
2272 : nullptr;
2274 StringData* sclass = nullptr;
2275 StringData* sname = nullptr;
2276 if (str) {
2277 Func* f = Unit::lookupFunc(str);
2278 if (f) return f;
2279 String name(const_cast<StringData*>(str));
2280 int pos = name.find("::");
2281 if (pos <= 0 || pos + 2 >= name.size() ||
2282 name.find("::", pos + 2) != String::npos) {
2283 return nullptr;
2285 sclass = makeStaticString(name.substr(0, pos).get());
2286 sname = makeStaticString(name.substr(pos + 2).get());
2287 } else if (arr) {
2288 if (arr->size() != 2) return nullptr;
2289 const Variant& e0 = arr->get(int64_t(0), false);
2290 const Variant& e1 = arr->get(int64_t(1), false);
2291 if (!e0.isString() || !e1.isString()) return nullptr;
2292 sclass = e0.getStringData();
2293 sname = e1.getStringData();
2294 String name(sname);
2295 if (name.find("::") != String::npos) return nullptr;
2296 } else {
2297 return nullptr;
2300 if (sclass->isame(s_self.get())) {
2301 if (!ctx) return nullptr;
2302 cls = ctx;
2303 forward = true;
2304 } else if (sclass->isame(s_parent.get())) {
2305 if (!ctx || !ctx->parent()) return nullptr;
2306 cls = ctx->parent();
2307 forward = true;
2308 } else if (sclass->isame(s_static.get())) {
2309 return nullptr;
2310 } else {
2311 cls = Unit::lookupUniqueClass(sclass);
2312 if (!cls) return nullptr;
2315 bool magicCall = false;
2316 const Func* f = lookupImmutableMethod(cls, sname, magicCall,
2317 /* staticLookup = */ true, ctx);
2318 if (!f || (forward && !ctx->classof(f->cls()))) {
2320 * To preserve the invariant that the lsb class
2321 * is an instance of the context class, we require
2322 * that f's class is an instance of the context class.
2323 * This is conservative, but without it, we would need
2324 * a runtime check to decide whether or not to forward
2325 * the lsb class
2327 return nullptr;
2329 if (magicCall) invName = sname;
2330 return f;
2333 bool HhbcTranslator::emitFPushCufArray(SSATmp* callable, int32_t numParams) {
2334 if (!callable->isA(Type::Arr)) return false;
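// We only handle a callable built inline as a packed array of exactly two
// cells, i.e. roughly array($obj, 'method'); anything else returns false and
// takes the generic FPushCufUnknown path.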
2336 auto callableInst = callable->inst();
2337 if (!callableInst->is(NewPackedArray)) return false;
2339 auto callableSize = callableInst->src(0);
2340 if (!callableSize->isConst() ||
2341 callableSize->intVal() != 2) {
2342 return false;
2345 auto method = getStackValue(m_irb->sp(), 0).value;
2346 auto object = getStackValue(m_irb->sp(), 1).value;
2347 if (!method || !object) return false;
2349 if (!method->isConst(Type::Str) ||
2350 strstr(method->strVal()->data(), "::") != nullptr) {
2351 return false;
2354 if (!object->isA(Type::Obj)) {
2355 if (!object->type().equals(Type::Cell)) return false;
2356 // This is probably an object, and we just haven't guarded on
2357 // the type. Do so now.
2358 auto exit = makeExit();
2359 object = gen(CheckType, Type::Obj, exit, object);
2361 m_irb->constrainValue(object, DataTypeSpecific);
2363 popC();
2365 gen(IncRef, object);
2366 emitFPushObjMethodCommon(object,
2367 method->strVal(),
2368 numParams,
2369 false /* shouldFatal */,
2370 callable);
2371 gen(DecRef, callable);
2372 return true;
2375 // FPushCuf when the callee is not known at compile time.
2376 void HhbcTranslator::emitFPushCufUnknown(Op op, int32_t numParams) {
2377 if (op != Op::FPushCuf) {
2378 PUNT(emitFPushCufUnknown-nonFPushCuf);
2381 if (topC()->isA(Type::Obj)) {
2382 return emitFPushFuncObj(numParams);
2385 if (!topC()->type().subtypeOfAny(Type::Arr, Type::Str)) {
2386 PUNT(emitFPushCufUnknown);
2389 // Peek at the top of the stack before deciding to pop it.
2390 auto const callable = topC();
2391 if (emitFPushCufArray(callable, numParams)) return;
2393 popC();
2395 emitFPushActRec(
2396 cns(Type::Nullptr),
2397 cns(Type::Nullptr),
2398 numParams,
2399 nullptr
2401 auto const actRec = spillStack();
2404 * This is a similar case to lookup for functions in FPushFunc or
2405 * FPushObjMethod. We can throw in a weird situation where the
2406 * ActRec is already on the stack, but this bytecode isn't done
2407 * executing yet. See arPreliveOverwriteCells for details about why
2408 * we need this marker.
2410 updateMarker();
2412 auto const opcode = callable->isA(Type::Arr) ? LdArrFPushCuf
2413 : LdStrFPushCuf;
2414 gen(opcode, makeCatch({callable}, 1), callable, actRec, m_irb->fp());
2415 gen(DecRef, callable);
2418 void HhbcTranslator::emitFPushCufOp(Op op, int32_t numArgs) {
2419 const bool safe = op == OpFPushCufSafe;
2420 bool forward = op == OpFPushCufF;
2421 SSATmp* callable = topC(safe ? 1 : 0);
2423 Class* cls = nullptr;
2424 StringData* invName = nullptr;
2425 auto const callee = findCuf(op, callable, curClass(), cls, invName, forward);
2426 if (!callee) return emitFPushCufUnknown(op, numArgs);
2428 SSATmp* ctx;
2429 SSATmp* safeFlag = cns(true); // This is always true until the slow exits
2430 // below are implemented
2431 SSATmp* func = cns(callee);
2432 if (cls) {
2433 auto const exitSlow = makeExitSlow();
2434 if (!RDS::isPersistentHandle(cls->classHandle())) {
2435 // The miss path is complicated and rare. Punt for now. This
2436 // must be checked before we IncRef the context below, because
2437 // the slow exit will want to do that same IncRef via InterpOne.
2438 gen(LdClsCachedSafe, exitSlow, cns(cls->name()));
2441 if (forward) {
2442 ctx = gen(LdCtx, FuncData(curFunc()), m_irb->fp());
2443 ctx = gen(GetCtxFwdCall, ctx, cns(callee));
2444 } else {
2445 ctx = genClsMethodCtx(callee, cls);
2447 } else {
2448 ctx = cns(Type::Nullptr);
2449 if (!RDS::isPersistentHandle(callee->funcHandle())) {
2450 // The miss path is complicated and rare. Punt for now.
2451 func = gen(
2452 LdFuncCachedSafe, LdFuncCachedData(callee->name()), makeExitSlow()
2457 SSATmp* defaultVal = safe ? popC() : nullptr;
2458 popDecRef(Type::Cell); // callable
2459 if (safe) {
2460 push(defaultVal);
2461 push(safeFlag);
2464 emitFPushActRec(func, ctx, numArgs, invName);
2467 void HhbcTranslator::emitNativeImpl() {
2468 if (isInlining()) return emitNativeImplInlined();
2470 gen(NativeImpl, m_irb->fp());
2471 SSATmp* sp = gen(RetAdjustStack, m_irb->fp());
2472 SSATmp* retAddr = gen(LdRetAddr, m_irb->fp());
2473 SSATmp* fp = gen(FreeActRec, m_irb->fp());
2474 gen(RetCtrl, RetCtrlData(false), sp, fp, retAddr);
2476 // Flag that this trace has a Ret instruction so no ExitTrace is needed
2477 m_hasExit = true;
2480 void HhbcTranslator::emitFPushActRec(SSATmp* func,
2481 SSATmp* objOrClass,
2482 int32_t numArgs,
2483 const StringData* invName) {
2485 * Before allocating an ActRec, we do a spillStack so we'll have a
2486 * StkPtr that represents what the stack will look like after the
2487 * ActRec is popped.
2489 auto actualStack = spillStack();
2490 auto returnSp = actualStack;
2492 m_fpiStack.emplace(returnSp, m_irb->spOffset());
2494 ActRecInfo info;
2495 info.numArgs = numArgs;
2496 info.invName = invName;
2497 gen(
2498 SpillFrame,
2499 info,
2500 // Using actualStack instead of returnSp so SpillFrame still gets
2501 // the src in rVmSp. (TODO(#2288359).)
2502 actualStack,
2503 func,
2504 objOrClass
2506 assert(m_irb->stackDeficit() == 0);
2509 void HhbcTranslator::emitFPushCtorCommon(SSATmp* cls,
2510 SSATmp* obj,
2511 const Func* func,
2512 int32_t numParams) {
2513 push(obj);
2514 auto const fn = [&] {
2515 if (func) return cns(func);
2517 Without the updateMarker, the catch trace will write
2518 obj onto the stack, but the VMRegAnchor will set up the
2519 stack as it was before the FPushCtor*, which (for
2520 FPushCtorD at least) won't include obj.
2522 updateMarker();
2523 return gen(LdClsCtor, makeCatch(), cls);
2524 }();
2525 gen(IncRef, obj);
2526 auto numArgsAndFlags = ActRec::encodeNumArgs(numParams, false, false, true);
2527 emitFPushActRec(fn, obj, numArgsAndFlags, nullptr);
2530 void HhbcTranslator::emitFPushCtor(int32_t numParams) {
2531 auto catchBlock = makeCatch();
2532 SSATmp* cls = popA();
2533 SSATmp* obj = gen(AllocObj, catchBlock, cls);
2534 gen(IncRef, obj);
2535 emitFPushCtorCommon(cls, obj, nullptr, numParams);
2538 static bool canInstantiateClass(const Class* cls) {
2539 return cls &&
2540 !(cls->attrs() & (AttrAbstract | AttrInterface | AttrTrait));
2543 void HhbcTranslator::emitInitProps(const Class* cls, Block* catchBlock) {
2544 cls->initPropHandle();
2545 m_irb->ifThen(
2546 [&](Block* taken) {
2547 gen(CheckInitProps, taken, ClassData(cls));
2549 [&] {
2550 m_irb->hint(Block::Hint::Unlikely);
2551 gen(InitProps, catchBlock, ClassData(cls));
2556 void HhbcTranslator::emitInitSProps(const Class* cls, Block* catchBlock) {
2557 cls->initSPropHandles();
2558 m_irb->ifThen(
2559 [&](Block* taken) {
2560 gen(CheckInitSProps, taken, ClassData(cls));
2562 [&] {
2563 m_irb->hint(Block::Hint::Unlikely);
2564 gen(InitSProps, catchBlock, ClassData(cls));
2569 SSATmp* HhbcTranslator::emitAllocObjFast(const Class* cls) {
2570 // If it's an extension class with a custom instance initializer,
2571 // that init function does all the work.
2572 if (cls->instanceCtor()) {
2573 return gen(ConstructInstance, makeCatch(), ClassData(cls));
2576 // First, make sure our property init vectors are all set up
2577 bool props = cls->pinitVec().size() > 0;
2578 bool sprops = cls->numStaticProperties() > 0;
2579 assert((props || sprops) == cls->needInitialization());
2580 if (cls->needInitialization()) {
2581 if (props) emitInitProps(cls, makeCatch());
2582 if (sprops) emitInitSProps(cls, makeCatch());
2585 // Next, allocate the object
2586 auto const ssaObj = gen(NewInstanceRaw, ClassData(cls));
2588 // Initialize the properties
2589 gen(InitObjProps, ClassData(cls), ssaObj);
2591 // Call a custom initializer if one exists
2592 if (cls->callsCustomInstanceInit()) {
2593 return gen(CustomInstanceInit, ssaObj);
2596 return ssaObj;
2599 void HhbcTranslator::emitFPushCtorD(int32_t numParams, int32_t classNameStrId) {
2600 const StringData* className = lookupStringId(classNameStrId);
2602 const Class* cls = Unit::lookupUniqueClass(className);
2603 bool uniqueCls = classIsUnique(cls);
2604 bool persistentCls = classHasPersistentRDS(cls);
2605 bool canInstantiate = canInstantiateClass(cls);
2606 bool fastAlloc =
2607 !RuntimeOption::EnableObjDestructCall &&
2608 persistentCls &&
2609 canInstantiate &&
2610 !cls->callsCustomInstanceInit();
2612 const Func* func = uniqueCls ? cls->getCtor() : nullptr;
2613 if (func && !(func->attrs() & AttrPublic)) {
2614 Class* ctx = curClass();
2615 if (!ctx) {
2616 func = nullptr;
2617 } else if (ctx != cls) {
2618 if ((func->attrs() & AttrPrivate) ||
2619 !(ctx->classof(cls) || cls->classof(ctx))) {
2620 func = nullptr;
2625 auto const ssaCls =
2626 persistentCls ? cns(cls)
2627 : gen(LdClsCached, makeCatch(), cns(className));
2628 auto const obj =
2629 fastAlloc ? emitAllocObjFast(cls)
2630 : gen(AllocObj, makeCatch(), ssaCls);
2631 gen(IncRef, obj);
2632 emitFPushCtorCommon(ssaCls, obj, func, numParams);
2635 const StaticString s_uuinvoke("__invoke");
2638 * The CreateCl opcode is specified as not being allowed before the
2639 * class it creates exists, and closure classes are always unique.
2641 * This means even if we're not in RepoAuthoritative mode, as long as
2642 * this code is reachable it will always use the same closure Class*,
2643 * so we can just burn it into the TC without using RDS.
2645 void HhbcTranslator::emitCreateCl(int32_t numParams, int32_t funNameStrId) {
2646 auto const cls = Unit::lookupUniqueClass(lookupStringId(funNameStrId));
2647 auto const invokeFunc = cls->lookupMethod(s_uuinvoke.get());
2648 auto const clonedFunc = invokeFunc->cloneAndSetClass(curClass());
2649 assert(cls && (cls->attrs() & AttrUnique));
2651 // Although closures can't have destructors, destructing the
2652 // captured values (or captured $this) can lead to user-visible
2653 // side-effects, so we can't use AllocObjFast if
2654 // EnableObjDestructCall is on.
2655 auto const closure =
2656 RuntimeOption::EnableObjDestructCall ? gen(AllocObj, makeCatch(), cns(cls))
2657 : emitAllocObjFast(cls);
2658 gen(IncRef, closure);
2660 auto const ctx = [&]{
2661 if (!curClass()) return cns(nullptr);
2662 auto const ldctx = gen(LdCtx, FuncData(curFunc()), m_irb->fp());
2663 if (invokeFunc->attrs() & AttrStatic) {
2664 return gen(ConvClsToCctx, gen(LdClsCtx, ldctx));
2666 gen(IncRefCtx, ldctx);
2667 return ldctx;
2668 }();
2669 gen(StClosureCtx, closure, ctx);
2670 gen(StClosureFunc, FuncData(clonedFunc), closure);
2672 SSATmp* args[numParams];
2673 for (int32_t i = 0; i < numParams; ++i) {
2674 args[numParams - i - 1] = popF();
2677 int32_t propId = 0;
2678 for (; propId < numParams; ++propId) {
2679 gen(
2680 StClosureArg,
2681 PropByteOffset(cls->declPropOffset(propId)),
2682 closure,
2683 args[propId]
2687 // Closure static variables are per instance, and need to start
2688 // uninitialized. After numParams use vars, the remaining instance
2689 // properties hold any static locals.
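// Roughly: for function () use ($a, $b) { static $c; ... } the closure class
// has three declared properties; slots 0 and 1 receive the use vars $a and $b
// above, and the remaining slot backs the static local $c, set to Uninit here.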
2690 assert(cls->numDeclProperties() ==
2691 clonedFunc->numStaticLocals() + numParams);
2692 for (int32_t numDeclProperties = cls->numDeclProperties();
2693 propId < numDeclProperties;
2694 ++propId) {
2695 gen(
2696 StClosureArg,
2697 PropByteOffset(cls->declPropOffset(propId)),
2698 closure,
2699 cns(Type::Uninit)
2703 push(closure);
2706 void HhbcTranslator::emitFPushFuncCommon(const Func* func,
2707 const StringData* name,
2708 const StringData* fallback,
2709 int32_t numParams) {
2710 if (func) {
2711 func->validate();
2712 if (func->isNameBindingImmutable(curUnit())) {
2713 emitFPushActRec(cns(func),
2714 cns(Type::Nullptr),
2715 numParams,
2716 nullptr);
2717 return;
2721 // LdFuncCached can throw
2722 auto const catchBlock = makeCatch();
2723 auto const ssaFunc = fallback
2724 ? gen(LdFuncCachedU,
2725 LdFuncCachedUData { name, fallback },
2726 catchBlock)
2727 : gen(LdFuncCached,
2728 LdFuncCachedData { name },
2729 catchBlock);
2730 emitFPushActRec(ssaFunc,
2731 cns(Type::Nullptr),
2732 numParams,
2733 nullptr);
2736 void HhbcTranslator::emitFPushFuncD(int32_t numParams, int32_t funcId) {
2737 const NamedEntityPair& nep = lookupNamedEntityPairId(funcId);
2738 const StringData* name = nep.first;
2739 const Func* func = Unit::lookupFunc(nep.second);
2740 emitFPushFuncCommon(func, name, nullptr, numParams);
2743 void HhbcTranslator::emitFPushFuncU(int32_t numParams,
2744 int32_t funcId,
2745 int32_t fallbackFuncId) {
2746 const NamedEntityPair& nep = lookupNamedEntityPairId(funcId);
2747 const StringData* name = nep.first;
2748 const Func* func = Unit::lookupFunc(nep.second);
2749 const NamedEntityPair& fallbackNep = lookupNamedEntityPairId(fallbackFuncId);
2750 const StringData* fallbackName = fallbackNep.first;
2751 emitFPushFuncCommon(func, name, fallbackName, numParams);
2754 void HhbcTranslator::emitFPushFunc(int32_t numParams) {
2755 if (topC()->isA(Type::Obj)) {
2756 return emitFPushFuncObj(numParams);
2759 if (topC()->isA(Type::Arr)) {
2760 return emitFPushFuncArr(numParams);
2763 if (!topC()->isA(Type::Str)) {
2764 PUNT(FPushFunc_not_Str);
2767 auto const catchBlock = makeCatch();
2768 auto const funcName = popC();
2769 emitFPushActRec(gen(LdFunc, catchBlock, funcName),
2770 cns(Type::Nullptr),
2771 numParams,
2772 nullptr);
2775 void HhbcTranslator::emitFPushFuncObj(int32_t numParams) {
2776 auto const slowExit = makeExitSlow();
2777 auto const obj = popC();
2778 auto const cls = gen(LdObjClass, obj);
2779 auto const func = gen(LdObjInvoke, slowExit, cls);
2780 emitFPushActRec(func, obj, numParams, nullptr);
2783 void HhbcTranslator::emitFPushFuncArr(int32_t numParams) {
2784 auto const thisAR = m_irb->fp();
2786 auto const arr = popC();
2787 emitFPushActRec(
2788 cns(Type::Nullptr),
2789 cns(Type::Nullptr),
2790 numParams,
2791 nullptr);
2792 auto const actRec = spillStack();
2794 // This is special. We need to move the stack pointer in case LdArrFuncCtx
2795 // calls a destructor. Otherwise it would clobber the ActRec we just
2796 // pushed.
2797 updateMarker();
2799 gen(LdArrFuncCtx, makeCatch({arr}, 1), arr, actRec, thisAR);
2800 gen(DecRef, arr);
2803 void HhbcTranslator::emitFPushObjMethodCommon(SSATmp* obj,
2804 const StringData* methodName,
2805 int32_t numParams,
2806 bool shouldFatal,
2807 SSATmp* extraSpill) {
2808 SSATmp* objOrCls = obj;
2809 const Class* baseClass = nullptr;
2810 if (obj->type().isSpecialized()) {
2811 auto cls = obj->type().getClass();
2812 if (!m_irb->constrainValue(obj, TypeConstraint(cls).setWeak())) {
2813 // If we know the class without having to specialize a guard any further,
2814 // use it.
2815 baseClass = cls;
2819 bool magicCall = false;
2820 const Func* func = lookupImmutableMethod(baseClass,
2821 methodName,
2822 magicCall,
2823 /* staticLookup: */
2824 false,
2825 curClass());
2827 if (!func) {
2828 if (baseClass && !(baseClass->attrs() & AttrInterface)) {
2829 LookupResult res =
2830 g_context->lookupObjMethod(func, baseClass, methodName, curClass(),
2831 false);
2832 if (res == LookupResult::MethodFoundWithThis ||
2834 * TODO(#4455926): We don't allow vtable-style dispatch of
2835 * abstract static methods, though there's no strong reason
2836 * for that restriction here. It should be able to work, but
2837 * needs further testing before being enabled.
2839 (res == LookupResult::MethodFoundNoThis && !func->isAbstract())) {
2841 * If we found the func in baseClass, then either:
2842 * a) it's private, and this is always going to be the
2843 * called function. This case is handled further down.
2844 * OR
2845 * b) any derived class must have a func that matches in staticness
2846 * and is at least as accessible (and in particular, you can't
2847 * override a public/protected method with a private method). In
2848 * this case, we emit code to dynamically lookup the method given
2849 * the Object and the method slot, which is the same as func's.
2851 if (!(func->attrs() & AttrPrivate)) {
2852 auto const clsTmp = gen(LdObjClass, obj);
2853 auto const funcTmp = gen(
2854 LdClsMethod, clsTmp, cns(-(func->methodSlot() + 1))
2856 if (res == LookupResult::MethodFoundNoThis) {
2857 gen(DecRef, obj);
2858 objOrCls = clsTmp;
2860 emitFPushActRec(funcTmp, objOrCls, numParams,
2861 magicCall ? methodName : nullptr);
2862 return;
2864 } else {
2865 // method lookup did not find anything
2866 func = nullptr; // force lookup
2871 if (func != nullptr) {
2873 * static function: store base class into this slot instead of obj
2874 * and decref the obj that was pushed as the this pointer since
2875 * the obj won't be in the actrec and thus MethodCache::lookup won't
2876 * decref it
2878 * static closure body: we still need to pass the object instance
2879 * for the closure prologue to properly do its dispatch (and
2880 * extract use vars). It will decref it and put the class on the
2881 * actrec before entering the "real" cloned closure body.
2883 if (func->attrs() & AttrStatic && !func->isClosureBody()) {
2884 assert(baseClass);
2885 gen(DecRef, obj);
2886 objOrCls = cns(baseClass);
2888 emitFPushActRec(cns(func),
2889 objOrCls,
2890 numParams,
2891 magicCall ? methodName : nullptr);
2892 return;
2895 fpushObjMethodUnknown(obj, methodName, numParams, shouldFatal, extraSpill);
2898 // Push an ActRec for an object method when we don't know the Func* statically.
2899 void HhbcTranslator::fpushObjMethodUnknown(SSATmp* obj,
2900 const StringData* methodName,
2901 int32_t numParams,
2902 bool shouldFatal,
2903 SSATmp* extraSpill) {
2904 spillStack();
2905 emitFPushActRec(cns(Type::Nullptr), // Will be set by LdObjMethod
2906 obj,
2907 numParams,
2908 nullptr);
2909 auto const actRec = spillStack();
2910 auto const objCls = gen(LdObjClass, obj);
2912 // This is special. We need to move the stack pointer in case
2913 // LdObjMethod calls a destructor. Otherwise it would clobber the
2914 // ActRec we just pushed.
2915 updateMarker();
2916 Block* catchBlock;
2917 if (extraSpill) {
2919 * If LdObjMethod throws, it nulls out the ActRec (since the unwinder
2920 * will attempt to destroy it as if it were cells), and then writes
2921 * obj into the last entry, since we need it to be destroyed.
2922 * If we have another object to destroy, we should write it in
2923 * the first slot, so pop one cell, then push extraSpill.
2925 std::vector<SSATmp*> spill{extraSpill};
2926 catchBlock = makeCatch(spill, 1);
2927 } else {
2928 catchBlock = makeCatchNoSpill();
2930 gen(LdObjMethod,
2931 LdObjMethodData { methodName, shouldFatal },
2932 catchBlock,
2933 objCls,
2934 actRec);
2937 void HhbcTranslator::emitFPushObjMethodD(int32_t numParams,
2938 int32_t methodNameStrId) {
2939 auto const obj = popC();
2940 if (!obj->isA(Type::Obj)) PUNT(FPushObjMethodD-nonObj);
2941 auto const methodName = lookupStringId(methodNameStrId);
2942 emitFPushObjMethodCommon(obj, methodName, numParams, true /* shouldFatal */);
2945 SSATmp* HhbcTranslator::genClsMethodCtx(const Func* callee, const Class* cls) {
2946 bool mustBeStatic = true;
2948 if (!(callee->attrs() & AttrStatic) &&
2949 !(curFunc()->attrs() & AttrStatic) &&
2950 curClass()) {
2951 if (curClass()->classof(cls)) {
2952 // In this case, it might not be static, but we can be sure
2953 // we're going to forward $this if thisAvailable.
2954 mustBeStatic = false;
2955 } else if (cls->classof(curClass())) {
2956 // Unlike the above, we might be calling down to a subclass that
2957 // is not related to the current instance. To know whether this
2958 // call forwards $this requires a runtime type check, so we have
2959 // to punt instead of trying the thisAvailable path below.
2960 PUNT(getClsMethodCtx-PossibleStaticRelatedCall);
2964 if (mustBeStatic) {
2965 // static function: ctx is just the Class*. LdCls will simplify to a
2966 // DefConst or LdClsCached.
2967 return gen(LdCls, makeCatch(), cns(cls->name()), cns(curClass()));
2969 if (m_irb->thisAvailable()) {
2970 // might not be a static call and $this is available, so we know it's
2971 // definitely not static
2972 assert(curClass());
2973 auto this_ = gen(LdThis, m_irb->fp());
2974 gen(IncRef, this_);
2975 return this_;
2977 // might be a non-static call. we have to inspect the func at runtime
2978 PUNT(getClsMethodCtx-MightNotBeStatic);
2981 void HhbcTranslator::emitFPushClsMethodD(int32_t numParams,
2982 int32_t methodNameStrId,
2983 int32_t clssNamedEntityPairId) {
2985 auto const methodName = lookupStringId(methodNameStrId);
2986 auto const& np = lookupNamedEntityPairId(clssNamedEntityPairId);
2987 auto const className = np.first;
2988 auto const baseClass = Unit::lookupUniqueClass(np.second);
2989 bool magicCall = false;
2991 if (auto const func = lookupImmutableMethod(baseClass,
2992 methodName,
2993 magicCall,
2994 true /* staticLookup */,
2995 curClass())) {
2996 auto const objOrCls = genClsMethodCtx(func, baseClass);
2997 emitFPushActRec(cns(func),
2998 objOrCls,
2999 numParams,
3000 func && magicCall ? methodName : nullptr);
3001 return;
3004 auto const slowExit = makeExitSlow();
3005 auto const data = ClsMethodData{className, methodName, np.second};
3007 // Look up the Func* in the targetcache. If it's not there, try the slow
3008 // path. If that fails, slow exit.
3009 auto const func = m_irb->cond(
3011 [&] (Block* taken) {
3012 return gen(CheckNonNull, taken, gen(LdClsMethodCacheFunc, data));
3014 [&] (SSATmp* func) { // next
3015 return func;
3017 [&] { // taken
3018 m_irb->hint(Block::Hint::Unlikely);
3019 auto result = gen(LookupClsMethodCache, makeCatch(), data,
3020 m_irb->fp());
3021 return gen(CheckNonNull, slowExit, result);
3024 auto const clsCtx = gen(LdClsMethodCacheCls, data);
3026 emitFPushActRec(func,
3027 clsCtx,
3028 numParams,
3029 nullptr);
3032 void HhbcTranslator::emitFPushClsMethod(int32_t numParams) {
3033 auto const clsVal = popA();
3034 auto const methVal = popC();
3036 if (!methVal->isA(Type::Str) || !clsVal->isA(Type::Cls)) {
3037 PUNT(FPushClsMethod-unknownType);
3040 if (methVal->isConst() && clsVal->inst()->op() == LdClsCctx) {
3042 * Optimize FPushClsMethod when the method is a known static
3043 * string and the input class is the context. The common bytecode
3044 * pattern here is LateBoundCls ; FPushClsMethod.
3046 * This logic feels like it belongs in the simplifier, but the
3047 * generated code for this case is pretty different, since we
3048 * don't need the pre-live ActRec trick.
3050 auto const cls = curClass();
3051 const Func* func;
3052 auto res =
3053 g_context->lookupClsMethod(func,
3054 cls,
3055 methVal->strVal(),
3056 nullptr,
3057 cls,
3058 false);
3059 if (res == LookupResult::MethodFoundNoThis) {
3060 auto funcTmp = gen(LdClsMethod, clsVal, cns(-(func->methodSlot() + 1)));
3061 emitFPushActRec(funcTmp, clsVal, numParams, nullptr);
3062 return;
3066 emitFPushActRec(cns(Type::Nullptr),
3067 cns(Type::Nullptr),
3068 numParams,
3069 nullptr);
3070 auto const actRec = spillStack();
3073 * Similar to FPushFunc/FPushObjMethod, we have an incomplete ActRec
3074 * on the stack and must handle that properly if we throw.
3076 updateMarker();
3078 gen(LookupClsMethod, makeCatch({methVal, clsVal}), clsVal, methVal, actRec,
3079 m_irb->fp());
3080 gen(DecRef, methVal);
3083 void HhbcTranslator::emitFPushClsMethodF(int32_t numParams) {
3084 auto const exitBlock = makeExitSlow();
3086 auto classTmp = top(Type::Cls);
3087 auto methodTmp = topC(1, DataTypeGeneric);
3088 assert(classTmp->isA(Type::Cls));
3089 if (!classTmp->isConst() || !methodTmp->isConst(Type::Str)) {
3090 PUNT(FPushClsMethodF-unknownClassOrMethod);
3092 m_irb->constrainValue(methodTmp, DataTypeSpecific);
3094 auto const cls = classTmp->clsVal();
3095 auto const methName = methodTmp->strVal();
3097 bool magicCall = false;
3098 auto const vmfunc = lookupImmutableMethod(cls,
3099 methName,
3100 magicCall,
3101 true /* staticLookup */,
3102 curClass());
3103 auto const catchBlock = vmfunc ? nullptr : makeCatch();
3104 discard(2);
3106 auto const curCtxTmp = gen(LdCtx, FuncData(curFunc()), m_irb->fp());
3107 if (vmfunc) {
3108 auto const funcTmp = cns(vmfunc);
3109 auto const newCtxTmp = gen(GetCtxFwdCall, curCtxTmp, funcTmp);
3110 emitFPushActRec(funcTmp, newCtxTmp, numParams,
3111 (magicCall ? methName : nullptr));
3112 return;
3115 auto const data = ClsMethodData{cls->name(), methName};
3116 auto const funcTmp = m_irb->cond(
3118 [&](Block* taken) {
3119 return gen(CheckNonNull, taken, gen(LdClsMethodFCacheFunc, data));
3121 [&](SSATmp* func) { // next
3122 return func;
3124 [&] { // taken
3125 m_irb->hint(Block::Hint::Unlikely);
3126 auto result = gen(LookupClsMethodFCache, catchBlock, data,
3127 cns(cls), m_irb->fp());
3128 return gen(CheckNonNull, exitBlock, result);
3131 auto const ctx = gen(GetCtxFwdCallDyn, data, curCtxTmp);
3133 emitFPushActRec(funcTmp,
3134 ctx,
3135 numParams,
3136 magicCall ? methName : nullptr);
3139 void HhbcTranslator::emitFCallArray(const Offset pcOffset,
3140 const Offset after,
3141 bool destroyLocals) {
3142 auto const stack = spillStack();
3143 gen(CallArray, CallArrayData { pcOffset, after, destroyLocals }, stack);
3146 void HhbcTranslator::emitFCall(uint32_t numParams,
3147 Offset returnBcOffset,
3148 const Func* callee,
3149 bool destroyLocals) {
3150 if (RuntimeOption::EvalRuntimeTypeProfile) {
3151 for (auto i = uint32_t{0}; i < numParams; ++i) {
3152 auto const val = topF(numParams - i - 1);
3153 if (callee != nullptr) {
3154 gen(TypeProfileFunc, TypeProfileData(i), val, cns(callee));
3155 } else {
3156 auto const func = gen(LdARFuncPtr, m_irb->sp(), cns(0));
3157 gen(TypeProfileFunc, TypeProfileData(i), val, func);
3163 * Figure out if we know where we're going already (if a prologue
3164 * was already generated, we don't need to do a whole bind call
3165 * thing again).
3167 * We're skipping magic calls right now because 'callee' will be set
3168 * to __call in some cases (with 86ctor) where we shouldn't really
3169 * call that function (arguably a bug in the annotation).
3171 * TODO(#4357498): This is currently disabled, because we haven't
3172 * set things up properly to be able to eagerly bind. Because
3173 * code-gen can punt, the code there needs to delay adding these
3174 * smash locations until after we know the translation isn't punted.
3176 auto const knownPrologue = [&]() -> TCA {
3177 if (false) {
3178 if (!callee || callee->isMagic()) return nullptr;
3179 auto const prologueIndex =
3180 numParams <= callee->numNonVariadicParams()
3181 ? numParams
3182 : callee->numNonVariadicParams() + 1;
3183 TCA ret;
3184 if (!mcg->checkCachedPrologue(callee, prologueIndex, ret)) {
3185 return nullptr;
3187 return ret;
3189 return nullptr;
3190 }();
3192 auto const sp = spillStack();
3193 gen(
3194 Call,
3195 CallData {
3196 numParams,
3197 returnBcOffset,
3198 callee,
3199 destroyLocals,
3200 knownPrologue
3203 m_irb->fp()
3205 if (!m_fpiStack.empty()) {
3206 m_fpiStack.pop();
3210 const StaticString
3211 s_count("count"),
3212 s_getCustomBoolSettingFuncName("server_get_custom_bool_setting");
3214 SSATmp* HhbcTranslator::optimizedCallCount() {
3215 auto const mode = top(Type::Int, 0);
3216 auto const val = top(Type::Gen, 1);
3218 // Bail if we're trying to do a recursive count()
3219 if (!mode->isConst(0)) return nullptr;
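// (count()'s second argument is the mode; only the plain mode-0 form, i.e.
// count($x) without COUNT_RECURSIVE, is optimized here.)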
3221 return gen(Count, makeCatch(), val);
3224 SSATmp* HhbcTranslator::optimizedServerGetCustomBoolSetting() {
3225 Type settingNameTmpType = topType(1);
3226 Type defaultValueTmpType = topType(0);
3228 // Only generate the optimized version if the types match exactly
3229 if (!(settingNameTmpType <= Type::StaticStr) ||
3230 !(defaultValueTmpType <= Type::Bool)) return nullptr;
3232 auto const settingNameTmp = top(Type::Str, 1);
3233 const StringData *settingName = settingNameTmp->strVal();
3235 bool settingValue = false;
3236 if (!RuntimeOption::GetServerCustomBoolSetting(settingName->toCppString(),
3237 settingValue)) {
3238 // The value isn't present in the CustomSettings section of config.hdf so
3239 // we will simply push the default value argument.
3240 return top(Type::Bool, 0);
3241 } else {
3242 // We found the setting so return the value from config.hdf
3243 return cns(settingValue);
3247 bool HhbcTranslator::optimizedFCallBuiltin(const Func* func,
3248 uint32_t numArgs,
3249 uint32_t numNonDefault) {
3250 SSATmp* res = nullptr;
3251 switch (numArgs) {
3252 case 2:
3253 if (func->name()->isame(s_count.get())) res = optimizedCallCount();
3254 else if (func->name()->isame(s_getCustomBoolSettingFuncName.get())) {
3255 res = optimizedServerGetCustomBoolSetting();
3257 break;
3258 default: break;
3261 if (res == nullptr) return false;
3263 // Decref and free args
3264 for (int i = 0; i < numArgs; i++) {
3265 auto const arg = popR();
3266 if (i >= numArgs - numNonDefault) {
3267 gen(DecRef, arg);
3271 push(res);
3272 return true;
3275 void HhbcTranslator::emitFCallBuiltinCoerce(const Func* callee,
3276 uint32_t numArgs,
3277 uint32_t numNonDefault,
3278 bool destroyLocals) {
3280 * Spill args to stack. Some of the arguments may be passed by
3281 * reference, in which case we will pass a stack address.
3283 * The CallBuiltin instruction itself doesn't depend on the stack
3284 * pointer, but if any of its args were passed via pointers to the
3285 * stack it will indirectly depend on it.
3287 spillStack();
3289 // Convert types if needed.
3290 for (int i = 0; i < numNonDefault; i++) {
3291 auto const& pi = callee->params()[i];
3292 switch (pi.builtinType()) {
3293 case KindOfBoolean:
3294 case KindOfInt64:
3295 case KindOfDouble:
3296 case KindOfArray:
3297 case KindOfObject:
3298 case KindOfResource:
3299 case KindOfString:
3300 gen(CoerceStk,
3301 Type(pi.builtinType()),
3302 StackOffset(numArgs - i - 1),
3303 makeExitSlow(),
3304 m_irb->sp());
3305 break;
3306 case KindOfUnknown:
3307 break;
3308 default:
3309 not_reached();
3313 // Pass arguments for CallBuiltin.
3314 SSATmp* args[numArgs];
3315 for (int i = numArgs - 1; i >= 0; i--) {
3316 auto const& pi = callee->params()[i];
3317 switch (pi.builtinType()) {
3318 case KindOfBoolean:
3319 case KindOfInt64:
3320 case KindOfDouble:
3321 args[i] = top(Type(pi.builtinType()),
3322 numArgs - i - 1);
3323 break;
3324 default:
3325 args[i] = ldStackAddr(numArgs - i - 1, DataTypeSpecific);
3326 break;
3330 // Generate call and set return type
3331 auto const retDt = callee->returnType();
3332 auto retType = retDt == KindOfUnknown ? Type::Cell : Type(retDt);
3333 if (callee->attrs() & ClassInfo::IsReference) retType = retType.box();
3335 SSATmp** const decayedPtr = &args[0];
3336 auto const ret = gen(
3337 CallBuiltin,
3338 retType,
3339 CallBuiltinData { callee, destroyLocals },
3340 makeCatch(),
3341 std::make_pair(numArgs, decayedPtr)
3344 // Decref and free args
3345 for (int i = 0; i < numArgs; i++) {
3346 auto const arg = popR();
3347 if (i >= numArgs - numNonDefault) {
3348 gen(DecRef, arg);
3352 push(ret);
3355 void HhbcTranslator::emitFCallBuiltin(uint32_t numArgs,
3356 uint32_t numNonDefault,
3357 int32_t funcId,
3358 bool destroyLocals) {
3359 const NamedEntity* ne = lookupNamedEntityId(funcId);
3360 const Func* callee = Unit::lookupFunc(ne);
3362 callee->validate();
3364 if (optimizedFCallBuiltin(callee, numArgs, numNonDefault)) return;
3366 if (callee->isParamCoerceMode()) {
3367 return emitFCallBuiltinCoerce(callee, numArgs, numNonDefault,
3368 destroyLocals);
3371 emitBuiltinCall(callee,
3372 numArgs,
3373 numNonDefault,
3374 nullptr, /* no this */
3375 false, /* not inlining */
3376 false, /* not inlining constructor */
3377 destroyLocals,
3378 [&](uint32_t) { return popC(); });
3381 template<class GetArg>
3382 void HhbcTranslator::emitBuiltinCall(const Func* callee,
3383 uint32_t numArgs,
3384 uint32_t numNonDefault,
3385 SSATmp* paramThis,
3386 bool inlining,
3387 bool wasInliningConstructor,
3388 bool destroyLocals,
3389 GetArg getArg) {
3390 // Collect the parameter locals---we'll need them later. Also
3391 // determine which ones will need to be passed through the eval
3392 // stack.
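// Roughly: Bool/Int/Dbl parameters can be passed by value, while everything
// else is pushed onto the eval stack so the builtin receives a pointer to it
// (see the switch below and the ldStackAddr calls later).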
3393 smart::vector<SSATmp*> paramSSAs(numArgs);
3394 smart::vector<bool> paramThroughStack(numArgs);
3395 smart::vector<bool> paramNeedsConversion(numArgs);
3396 auto numParamsThroughStack = uint32_t{0};
3397 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3398 // Fill in paramSSAs in reverse, since they may come from popC's.
3399 auto const offset = numArgs - i - 1;
3400 paramSSAs[offset] = getArg(offset);
3402 auto const& pi = callee->params()[offset];
3403 switch (pi.builtinType()) {
3404 case KindOfBoolean:
3405 case KindOfInt64:
3406 case KindOfDouble:
3407 paramThroughStack[offset] = false;
3408 break;
3409 default:
3410 ++numParamsThroughStack;
3411 paramThroughStack[offset] = true;
3412 break;
3415 paramNeedsConversion[offset] = offset < numNonDefault
3416 && pi.builtinType() != KindOfUnknown;
3419 // For the same reason that we have to IncRef the locals above, we
3420 // need to grab one on the $this.
3421 if (paramThis) gen(IncRef, paramThis);
3423 if (inlining) emitEndInlinedCommon(); /////// leaving inlined function
3426 * Everything that needs to be on the stack gets spilled now.
3428 * Note: right here we should eventually be handling the possibility
3429 * that we need to coerce parameters. But right now any situation
3430 * requiring that is disabled in shouldIRInline.
3432 if (numParamsThroughStack != 0 || !inlining) {
3433 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3434 if (paramThroughStack[i]) {
3435 push(paramSSAs[i]);
3438 // We're going to do ldStackAddrs on these, so the stack must be
3439 // materialized:
3440 spillStack();
3441 // This marker update makes sure rbx points to the bottom of
3442 // our stack when we enter our catch trace. The catch trace,
3443 // however, twiddles the VM registers directly on the execution
3444 // context to make the unwinder understand the situation.
3445 updateMarker();
3449 * We have an unusual situation if we raise an exception:
3451 * The unwinder is going to see our PC as equal to the FCallD for
3452 * the call to this NativeImpl instruction. This means the PC will
3453 * be inside the FPI region for the call, so it'll try to pop an
3454 * ActRec.
3456 * Meanwhile, we've just exited the inlined callee (and its frame
3457 * was hopefully removed by dce), and then pushed any of our
3458 * by-reference arguments on the eval stack. So, if we throw, we
3459 * need to pop anything we pushed, put down a fake ActRec, and then
3460 * eagerly sync VM regs to represent that stack depth.
3462 auto const makeUnusualCatch = [&] { return makeCatchImpl([&] {
3463 // TODO(#4323657): this is generating generic DecRefs at the time
3464 // of this writing---probably we're not handling the stack chain
3465 // correctly in a catch block.
3466 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3467 if (paramThroughStack[i]) {
3468 popDecRef(Type::Gen);
3469 } else {
3470 gen(DecRef, paramSSAs[i]);
3473 if (inlining) {
3474 emitFPushActRec(cns(callee),
3475 paramThis ? paramThis : cns(Type::Nullptr),
3476 ActRec::encodeNumArgs(numArgs,
3477 false /* localsDecRefd */,
3478 false /* resumed */,
3479 wasInliningConstructor),
3480 nullptr);
3482 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3483 // TODO(#4313939): it's not actually necessary to push these
3484 // nulls.
3485 push(cns(Type::InitNull));
3487 auto const stack = spillStack();
3488 gen(SyncABIRegs, m_irb->fp(), stack);
3489 gen(EagerSyncVMRegs, m_irb->fp(), stack);
3490 return stack;
3491 }); };
3494 * Prepare the actual arguments to the CallBuiltin instruction. If
3495 * any of the parameters need type conversions, we need to handle
3496 * that too.
3498 auto const cbNumArgs = numArgs + (paramThis ? 1 : 0);
3499 SSATmp* args[cbNumArgs];
3501 auto argIdx = uint32_t{0};
3502 auto stackIdx = uint32_t{0};
3504 if (paramThis) args[argIdx++] = paramThis;
3505 for (auto i = uint32_t{0}; i < numArgs; ++i) {
3506 if (!paramThroughStack[i]) {
3507 if (paramNeedsConversion[i]) {
3508 auto const ty = Type(callee->params()[i].builtinType());
3509 auto const oldVal = paramSSAs[i];
3510 paramSSAs[i] = [&] {
3511 if (ty <= Type::Int) {
3512 return gen(ConvCellToInt, makeUnusualCatch(), oldVal);
3514 if (ty <= Type::Dbl) {
3515 return gen(ConvCellToDbl, makeUnusualCatch(), oldVal);
3517 always_assert(ty <= Type::Bool); // or will be passed by stack
3518 return gen(ConvCellToBool, oldVal);
3519 }();
3520 gen(DecRef, oldVal);
3522 args[argIdx++] = paramSSAs[i];
3523 continue;
3526 auto const offset = numParamsThroughStack - stackIdx - 1;
3527 if (paramNeedsConversion[i]) {
3528 Type t(callee->params()[i].builtinType());
3529 if (callee->params()[i].builtinType() == KindOfObject &&
3530 callee->methInfo()->parameters[i]->valueLen > 0) {
3531 t = Type::NullableObj;
3533 gen(CastStk,
3534 makeUnusualCatch(),
3536 StackOffset { static_cast<int32_t>(offset) },
3537 m_irb->sp());
3540 args[argIdx++] = ldStackAddr(offset, DataTypeSpecific);
3541 ++stackIdx;
3544 assert(stackIdx == numParamsThroughStack);
3545 assert(argIdx == cbNumArgs);
3548 // Make the actual call.
3549 auto const retType = [&] {
3550 auto const retDt = callee->returnType();
3551 auto const ret = retDt == KindOfUnknown ? Type::Cell : Type(retDt);
3552 return callee->attrs() & ClassInfo::IsReference ? ret.box() : ret;
3553 }();
3554 SSATmp** decayedPtr = &args[0];
3555 auto const ret = gen(
3556 CallBuiltin,
3557 retType,
3558 CallBuiltinData { callee, destroyLocals },
3559 makeUnusualCatch(),
3560 std::make_pair(cbNumArgs, decayedPtr)
3563 // Pop the stack params and push the return value.
3564 if (paramThis) gen(DecRef, paramThis);
3565 for (auto i = uint32_t{0}; i < numParamsThroughStack; ++i) {
3566 popDecRef(Type::Gen);
3568 push(ret);
3571 void HhbcTranslator::emitEndInlinedCommon() {
3572 assert(!m_fpiActiveStack.empty());
3573 assert(!curFunc()->isPseudoMain());
3575 assert(!resumed());
3577 emitDecRefLocalsInline();
3579 if (curFunc()->mayHaveThis()) {
3580 gen(DecRefThis, m_irb->fp());
3584 * Pop the ActRec and restore the stack and frame pointers. It's
3585 * important that this does endInlining before pushing the return
3586 * value so stack offsets are properly tracked.
3588 gen(InlineReturn, m_irb->fp());
3590 // Return to the caller function. Careful between here and the
3591 // updateMarker() below, where the caller state isn't entirely set up.
3592 m_bcStateStack.pop_back();
3593 m_fpiActiveStack.pop();
3595 updateMarker();
3596 gen(
3597 ReDefSP,
3598 ReDefSPData {
3599 m_irb->spOffset(),
3600 m_irb->inlinedFrameSpansCall()
3602 m_irb->sp(),
3603 m_irb->fp()
3607 * After the end of inlining, we are restoring to a previously
3608 * defined stack that we know is entirely materialized (i.e. in
3609 * memory), so stackDeficit needs to be slammed to zero.
3611 * The push of the return value in the caller of this function is
3612 * not yet materialized.
3614 assert(m_irb->evalStack().numCells() == 0);
3615 m_irb->clearStackDeficit();
3617 FTRACE(1, "]]] end inlining: {}\n", curFunc()->fullName()->data());
3621 * When we're inlining a NativeImpl opcode, we know this is the only
3622 * opcode in the callee method body (bytecode invariant). So in
3623 * order to make sure we can eliminate the SpillFrame, we do the
3624 * CallBuiltin instruction after we've left the inlined frame.
3626 * We may need to pass some arguments to the builtin through the
3627 * stack (e.g. if it takes const Variant&'s)---these are spilled to
3628 * the stack after leaving the callee.
3630 * To make this work, we need to do some weird things in the catch
3631 * trace. ;)
3633 void HhbcTranslator::emitNativeImplInlined() {
3634 auto const callee = curFunc();
3635 assert(callee->nativeFuncPtr());
3637 // Figure out if this inlined function was for an FPushCtor. We'll
3638 // need this when creating the unwind block below.
3639 auto const wasInliningConstructor = [&]() -> bool {
3640 auto const sframe = findSpillFrame(m_irb->sp());
3641 assert(sframe);
3642 return sframe->extra<ActRecInfo>()->isFromFPushCtor();
3643 }();
3645 bool const instanceMethod = callee->isMethod() &&
3646 !(callee->attrs() & AttrStatic);
3647 // Collect the parameter locals---we'll need them later. Also
3648 // determine which ones will need to be passed through the eval
3649 // stack.
3650 auto const numArgs = callee->numParams();
3651 auto const paramThis = instanceMethod ? gen(LdThis, m_irb->fp())
3652 : nullptr;
3654 emitBuiltinCall(callee,
3655 numArgs,
3656 numArgs, /* numNonDefault */
3657 paramThis,
3658 true, /* inlining */
3659 wasInliningConstructor,
3660 false, /* destroyLocals */
3661 [&](uint32_t i) {
3662 auto ret = ldLoc(i, nullptr, DataTypeSpecific);
3663 gen(IncRef, ret);
3664 return ret;
3668 void HhbcTranslator::emitRetFromInlined(Type type) {
3669 auto const retVal = pop(type, DataTypeGeneric);
3670 // Before we leave the inlined frame, grab a type prediction from
3671 // our DefInlineFP.
3672 auto const retPred = frameRoot(
3673 m_irb->fp()->inst())->extra<DefInlineFP>()->retTypePred;
3674 emitEndInlinedCommon();
3675 push(retVal);
3676 if (retPred < retVal->type()) { // TODO: this if statement shouldn't
3677 // be here, because check type
3678 // resolves to the intersection of
3679 // the two types
3680 // If we had a predicted output type that's useful, check that here.
3681 checkTypeStack(0, retPred, curSrcKey().advanced().offset());
3685 void HhbcTranslator::emitDecRefLocalsInline() {
3686 for (int id = curFunc()->numLocals() - 1; id >= 0; --id) {
3687 gen(DecRefLoc, Type::Gen, LocalId(id), m_irb->fp());
3691 void HhbcTranslator::emitRet(Type type, bool freeInline) {
3692 auto const func = curFunc();
3693 if (func->attrs() & AttrMayUseVV) {
3694 // Note: this has to be the first thing, because we cannot bail once we
3695 // start decRefing locals: there would be no corresponding bytecode
3696 // boundaries until the end of RetC.
3697 gen(ReleaseVVOrExit, makeExitSlow(), m_irb->fp());
3700 // Pop the return value. Since it will be teleported to its place in memory,
3701 // we don't care about the type.
3702 auto catchBlock = makeCatch();
3703 SSATmp* retVal = pop(type, func->isGenerator() ? DataTypeSpecific
3704 : DataTypeGeneric);
3706 // Free local variables.
3707 if (freeInline) {
3708 emitDecRefLocalsInline();
3709 for (unsigned i = 0; i < func->numLocals(); ++i) {
3710 m_irb->constrainLocal(i, DataTypeCountness, "inlined RetC/V");
3712 } else {
3713 gen(GenericRetDecRefs, m_irb->fp());
3716 // Free $this.
3717 if (func->mayHaveThis()) {
3718 gen(DecRefThis, m_irb->fp());
3721 // Call the FunctionReturn hook and put the return value on the stack so that
3722 // the unwinder can decref it.
3723 emitRetSurpriseCheck(m_irb->fp(), retVal, catchBlock, false);
3725 // In async function, wrap the return value into succeeded StaticWaitHandle.
3726 if (!resumed() && func->isAsyncFunction()) {
3727 retVal = gen(CreateSSWH, retVal);
3730 // Type profile return value.
3731 if (RuntimeOption::EvalRuntimeTypeProfile) {
3732 gen(TypeProfileFunc, TypeProfileData(-1), retVal, cns(func));
3735 SSATmp* sp;
3736 SSATmp* resumableObj = nullptr;
3737 if (!resumed()) {
3738 // Store the return value.
3739 gen(StRetVal, m_irb->fp(), retVal);
3741 // Free ActRec.
3742 sp = gen(RetAdjustStack, m_irb->fp());
3743 } else if (func->isAsyncFunction()) {
3744 // Load the parent chain.
3745 auto parentChain = gen(LdAsyncArFParent, m_irb->fp());
3747 // Mark the async function as succeeded.
3748 auto succeeded = c_WaitHandle::toKindState(
3749 c_WaitHandle::Kind::AsyncFunction, c_WaitHandle::STATE_SUCCEEDED);
3750 gen(StAsyncArRaw, RawMemData{RawMemData::AsyncState}, m_irb->fp(),
3751 cns(succeeded));
3753 // Store the return value.
3754 gen(StAsyncArResult, m_irb->fp(), retVal);
3756 // Unblock parents.
3757 gen(BWHUnblockChain, parentChain);
3759 // Sync SP.
3760 sp = spillStack();
3762 // Get the AsyncFunctionWaitHandle.
3763 resumableObj = gen(LdResumableArObj, m_irb->fp());
3764 } else if (func->isNonAsyncGenerator()) {
3765 // Clear generator's key and value.
3766 auto const oldKey = gen(LdContArKey, Type::Cell, m_irb->fp());
3767 gen(StContArKey, m_irb->fp(), cns(Type::InitNull));
3768 gen(DecRef, oldKey);
3770 auto const oldValue = gen(LdContArValue, Type::Cell, m_irb->fp());
3771 gen(StContArValue, m_irb->fp(), cns(Type::InitNull));
3772 gen(DecRef, oldValue);
3774 // Mark generator as finished.
3775 gen(StContArRaw, RawMemData{RawMemData::ContState}, m_irb->fp(),
3776 cns(c_Generator::Done));
3778 // Sync SP.
3779 sp = spillStack();
3780 } else {
3781 not_reached();
3784 // Grab caller info from ActRec.
3785 SSATmp* retAddr = gen(LdRetAddr, m_irb->fp());
3786 SSATmp* fp = gen(FreeActRec, m_irb->fp());
3788 // Drop reference to this resumable.
3789 if (resumableObj != nullptr) {
3790 gen(DecRef, resumableObj);
3793 // Return control to the caller.
3794 gen(RetCtrl, RetCtrlData(false), sp, fp, retAddr);
3796 // Flag that this trace has a Ret instruction, so that no ExitTrace is needed
3797 m_hasExit = true;
3800 void HhbcTranslator::emitRetC(bool freeInline) {
3801 if (isInlining()) {
3802 assert(!resumed());
3803 emitRetFromInlined(Type::Cell);
3804 } else {
3805 emitRet(Type::Cell, freeInline);
3809 void HhbcTranslator::emitRetV(bool freeInline) {
3810 assert(!resumed());
3811 assert(!curFunc()->isResumable());
3812 if (isInlining()) {
3813 emitRetFromInlined(Type::BoxedCell);
3814 } else {
3815 emitRet(Type::BoxedCell, freeInline);
3819 void HhbcTranslator::emitJmpSurpriseCheck(Block* catchBlock) {
3820 m_irb->ifThen([&](Block* taken) {
3821 gen(CheckSurpriseFlags, taken);
3823 [&] {
3824 m_irb->hint(Block::Hint::Unlikely);
3825 gen(SurpriseHook, catchBlock);
3829 void HhbcTranslator::emitRetSurpriseCheck(SSATmp* fp, SSATmp* retVal,
3830 Block* catchBlock,
3831 bool suspendingResumed) {
3832 emitRB(Trace::RBTypeFuncExit, curFunc()->fullName());
3833 m_irb->ifThen([&](Block* taken) {
3834 gen(CheckSurpriseFlags, taken);
3836 [&] {
3837 m_irb->hint(Block::Hint::Unlikely);
3838 if (retVal != nullptr) {
3839 gen(FunctionReturnHook, RetCtrlData(suspendingResumed),
3840 catchBlock, fp, retVal);
3841 } else {
3842 gen(FunctionSuspendHook, RetCtrlData(suspendingResumed),
3843 catchBlock, fp, cns(suspendingResumed));
3848 void HhbcTranslator::emitSwitch(const ImmVector& iv,
3849 int64_t base,
3850 bool bounded) {
3851 int nTargets = bounded ? iv.size() - 2 : iv.size();
3853 auto catchBlock = topC()->isA(Type::Obj) ? makeCatch() : nullptr;
3854 SSATmp* const switchVal = popC();
3855 Type type = switchVal->type();
3856 assert(IMPLIES(!(type <= Type::Int), bounded));
3857 assert(IMPLIES(bounded, iv.size() > 2));
3858 SSATmp* index;
3859 SSATmp* ssabase = cns(base);
3860 SSATmp* ssatargets = cns(nTargets);
3862 Offset defaultOff = bcOff() + iv.vec32()[iv.size() - 1];
3863 Offset zeroOff = 0;
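// Illustrative example (not from the original source): with base = -2 and
// nTargets = 5, a switch value of 0 falls inside the covered range and maps to
// jump-table index 0 - base = 2; when 0 lies outside [base, base + nTargets),
// Null and false inputs go to the default target instead.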
3864 if (base <= 0 && (base + nTargets) > 0) {
3865 zeroOff = bcOff() + iv.vec32()[0 - base];
3866 } else {
3867 zeroOff = defaultOff;
3870 if (type <= Type::Null) {
3871 gen(Jmp, makeExit(zeroOff));
3872 return;
3874 if (type <= Type::Bool) {
3875 Offset nonZeroOff = bcOff() + iv.vec32()[iv.size() - 2];
3876 gen(JmpNZero, makeExit(nonZeroOff), switchVal);
3877 gen(Jmp, makeExit(zeroOff));
3878 return;
3881 if (type <= Type::Int) {
3882 // No special treatment needed
3883 index = switchVal;
3884 } else if (type <= Type::Dbl) {
3885 // The switch(Double|String|Obj)Helper routines do bounds-checking for us,
3886 // so we need to make sure the default case is in the jump table, and we
3887 // don't emit our own bounds-checking code.
3888 bounded = false;
3889 index = gen(LdSwitchDblIndex, switchVal, ssabase, ssatargets);
3890 } else if (type <= Type::Str) {
3891 bounded = false;
3892 index = gen(LdSwitchStrIndex, switchVal, ssabase, ssatargets);
3893 } else if (type <= Type::Obj) {
3894 // switchObjHelper can throw exceptions and reenter the VM so we use the
3895 // catch block here.
3896 bounded = false;
3897 index = gen(LdSwitchObjIndex, catchBlock, switchVal, ssabase, ssatargets);
3898 } else if (type <= Type::Arr) {
3899 gen(DecRef, switchVal);
3900 gen(Jmp, makeExit(defaultOff));
3901 return;
3902 } else {
3903 PUNT(Switch-UnknownType);
3906 std::vector<Offset> targets(iv.size());
3907 for (int i = 0; i < iv.size(); i++) {
3908 targets[i] = bcOff() + iv.vec32()[i];
3911 JmpSwitchData data;
3912 data.base = base;
3913 data.bounded = bounded;
3914 data.cases = iv.size();
3915 data.defaultOff = defaultOff;
3916 data.targets = &targets[0];
3918 auto const stack = spillStack();
3919 gen(SyncABIRegs, m_irb->fp(), stack);
3921 gen(JmpSwitchDest, data, index);
3922 m_hasExit = true;
3925 void HhbcTranslator::emitSSwitch(const ImmVector& iv) {
3926 const int numCases = iv.size() - 1;
3929 * We use a fast path translation with a hashtable if none of the
3930 * cases are numeric strings and if the input is actually a string.
3932 * Otherwise we do a linear search through the cases calling string
3933 * conversion routines.
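 *
 * Rough illustration (an assumption about intent, not verified against the
 * runtime): a switch over the string cases "red"/"green"/"blue" with a
 * known-Str input can use the hashtable lookup, whereas a case list that
 * contains "123" must take the slow path, since numeric strings follow PHP's
 * loose-comparison rules.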
3935 const bool fastPath =
3936 topC()->isA(Type::Str) &&
3937 std::none_of(iv.strvec(), iv.strvec() + numCases,
3938 [&](const StrVecItem& item) {
3939 return curUnit()->lookupLitstrId(item.str)->isNumeric();
3943 Block* catchBlock = nullptr;
3944 // The slow path can throw exceptions and reenter the VM.
3945 if (!fastPath) catchBlock = makeCatch();
3947 auto const testVal = popC();
3949 std::vector<LdSSwitchData::Elm> cases(numCases);
3950 for (int i = 0; i < numCases; ++i) {
3951 auto const& kv = iv.strvec()[i];
3952 cases[i].str = curUnit()->lookupLitstrId(kv.str);
3953 cases[i].dest = bcOff() + kv.dest;
3956 LdSSwitchData data;
3957 data.numCases = numCases;
3958 data.cases = &cases[0];
3959 data.defaultOff = bcOff() + iv.strvec()[iv.size() - 1].dest;
3961 SSATmp* dest = gen(fastPath ? LdSSwitchDestFast
3962 : LdSSwitchDestSlow,
3963 catchBlock,
3964 data,
3965 testVal);
3966 gen(DecRef, testVal);
3967 auto const stack = spillStack();
3968 gen(SyncABIRegs, m_irb->fp(), stack);
3969 gen(JmpIndirect, dest);
3970 m_hasExit = true;
3973 void HhbcTranslator::setThisAvailable() {
3974 m_irb->setThisAvailable();
3978 * Emit a type guard, possibly using profiling information. Depending on the
3979 * current translation mode and type to be guarded, this function may emit
3980 * additional profiling code or modify the guarded type using previously
3981 * collected profiling information. Str -> StaticStr is the only supported
3982 * refinement for now.
3984 * type: The original guard type.
3985 * location, id: Name and index used in a profile key like "Loc3" or "Stk0".
3986 * doGuard: Lambda which will be called exactly once to emit the actual guard.
3987 * loadAddr: Lambda which will be called up to once to get a pointer to the
3988 * value being checked.
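 *
 * A minimal usage sketch, mirroring the call sites below (taken from
 * guardTypeLocal; treat it as illustrative rather than canonical):
 *
 *   emitProfiledGuard(
 *     type, "Loc", locId,
 *     [&](Type t) { gen(GuardLoc, t, LocalId(locId), m_irb->fp()); },
 *     [&] { return gen(LdLocAddr, Type::PtrToStr, LocalId(locId),
 *                      m_irb->fp()); });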
3990 template<typename G, typename L>
3991 void HhbcTranslator::emitProfiledGuard(Type type, const char* location,
3992 int32_t id, G doGuard, L loadAddr) {
3993 // We really do want to check for exact equality here: if type is StaticStr
3994 // there's nothing for us to do, and we don't support guarding on CountedStr.
3995 if (type != Type::Str ||
3996 (tx->mode() != TransKind::Profile && tx->mode() != TransKind::Optimize)) {
3997 return doGuard(type);
4000 auto profileKey = makeStaticString(folly::to<std::string>(location, id));
4001 TargetProfile<StrProfile> profile(m_context, m_irb->marker(), profileKey);
4002 if (profile.profiling()) {
4003 doGuard(Type::Str);
4004 auto addr = loadAddr();
4005 m_irb->constrainValue(addr, DataTypeSpecific);
4006 gen(ProfileStr, ProfileStrData(profileKey), addr);
4007 } else if (profile.optimizing()) {
4008 auto const data = profile.data(StrProfile::reduce);
4009 auto const total = data.total();
4011 if (data.staticStr == total) doGuard(Type::StaticStr);
4012 else doGuard(Type::Str);
4013 } else {
4014 doGuard(Type::Str);
4018 void HhbcTranslator::guardTypeLocal(uint32_t locId, Type type, bool outerOnly) {
4019 emitProfiledGuard(
4020 type, "Loc", locId,
4021 [&](Type type) { gen(GuardLoc, type, LocalId(locId), m_irb->fp()); },
4022 [&] { return gen(LdLocAddr, Type::PtrToStr, LocalId(locId),
4023 m_irb->fp()); }
4026 if (!outerOnly && type.isBoxed() && type.unbox() < Type::Cell) {
4027 auto const ldrefExit = makeExit();
4028 auto const ldgblExit = makePseudoMainExit();
4029 auto const val = ldLoc(locId, ldgblExit, DataTypeSpecific);
4030 gen(LdRef, type.unbox(), ldrefExit, val);
4034 void HhbcTranslator::guardTypeLocation(const RegionDesc::Location& loc,
4035 Type type, bool outerOnly) {
4036 assert(type <= Type::Gen);
4037 typedef RegionDesc::Location::Tag T;
4038 switch (loc.tag()) {
4039 case T::Stack: guardTypeStack(loc.stackOffset(), type, outerOnly); break;
4040 case T::Local: guardTypeLocal(loc.localId(), type, outerOnly); break;
4044 void HhbcTranslator::checkTypeLocal(uint32_t locId, Type type,
4045 Offset dest /* = -1 */) {
4046 emitProfiledGuard(
4047 type, "Loc", locId,
4048 [&](Type type) {
4049 gen(CheckLoc, type, LocalId(locId), makeExit(dest), m_irb->fp());
4051 [&] { return gen(LdLocAddr, Type::PtrToStr, LocalId(locId),
4052 m_irb->fp()); }
4056 void HhbcTranslator::assertTypeLocal(uint32_t locId, Type type) {
4057 gen(AssertLoc, type, LocalId(locId), m_irb->fp());
4060 void HhbcTranslator::checkType(const RegionDesc::Location& loc,
4061 Type type, Offset dest) {
4062 assert(type <= Type::Gen);
4063 typedef RegionDesc::Location::Tag T;
4064 switch (loc.tag()) {
4065 case T::Stack: checkTypeStack(loc.stackOffset(), type, dest); break;
4066 case T::Local: checkTypeLocal(loc.localId(), type, dest); break;
4070 void HhbcTranslator::assertType(const RegionDesc::Location& loc,
4071 Type type) {
4072 assert(type <= Type::StackElem);
4073 typedef RegionDesc::Location::Tag T;
4074 switch (loc.tag()) {
4075 case T::Stack: assertTypeStack(loc.stackOffset(), type); break;
4076 case T::Local: assertTypeLocal(loc.localId(), type); break;
4080 void HhbcTranslator::guardTypeStack(uint32_t stackIndex, Type type,
4081 bool outerOnly) {
4082 assert(type <= Type::Gen);
4083 assert(m_irb->evalStack().size() == 0);
4084 // This should only be called at the beginning of a trace, with a
4085 // clean stack
4086 assert(m_irb->stackDeficit() == 0);
4087 auto stackOff = StackOffset(stackIndex);
4089 emitProfiledGuard(
4090 type, "Stk", stackIndex,
4091 [&](Type type) { gen(GuardStk, type, stackOff, m_irb->sp()); },
4092 [&] { return gen(LdStackAddr, Type::PtrToStr, stackOff, m_irb->sp()); }
4095 if (!outerOnly && type.isBoxed() && type.unbox() < Type::Cell) {
4096 auto stk = gen(LdStack, Type::BoxedCell, stackOff, m_irb->sp());
4097 m_irb->constrainValue(stk, DataTypeSpecific);
4098 gen(LdRef, type.unbox(), makeExit(), stk);
4102 void HhbcTranslator::checkTypeStack(uint32_t idx, Type type, Offset dest) {
4103 assert(type <= Type::Gen);
4104 auto exit = makeExit(dest);
4105 if (idx < m_irb->evalStack().size()) {
4106 FTRACE(1, "checkTypeStack({}): generating CheckType for {}\n",
4107 idx, type.toString());
4108 // CheckType only cares about its input type if the simplifier does
4109 // something with it and that's handled if and when it happens.
4110 SSATmp* tmp = top(DataTypeGeneric, idx);
4111 assert(tmp);
4112 m_irb->evalStack().replace(idx, gen(CheckType, type, exit, tmp));
4113 } else {
4114 FTRACE(1, "checkTypeStack({}): no tmp: {}\n", idx, type.toString());
4115 // Just like CheckType, CheckStk only cares about its input type if the
4116 // simplifier does something with it.
4118 auto const adjustedOffset =
4119 StackOffset(idx - m_irb->evalStack().size() + m_irb->stackDeficit());
4120 emitProfiledGuard(
4121 type, "Stk", idx,
4122 [&](Type t) {
4123 gen(CheckStk, t, exit, adjustedOffset, m_irb->sp());
4125 [&] {
4126 return gen(LdStackAddr, Type::PtrToStr, adjustedOffset, m_irb->sp());
4132 void HhbcTranslator::checkTypeTopOfStack(Type type, Offset nextByteCode) {
4133 checkTypeStack(0, type, nextByteCode);
4136 void HhbcTranslator::assertTypeStack(uint32_t idx, Type type) {
4137 if (idx < m_irb->evalStack().size()) {
4138 // We're asserting a new type so we don't care about the previous type.
4139 SSATmp* tmp = top(DataTypeGeneric, idx);
4140 assert(tmp);
4141 m_irb->evalStack().replace(idx, gen(AssertType, type, tmp));
4142 } else {
4143 gen(AssertStk, type,
4144 StackOffset(idx - m_irb->evalStack().size() + m_irb->stackDeficit()),
4145 m_irb->sp());
4150 * Creates a RuntimeType struct from a program location. This needs access to
4151 * more than just the location's type because RuntimeType includes known
4152 * constant values. All accesses to the stack and locals use DataTypeGeneric so
4153 * this function should only be used for inspecting state; when the values are
4154 * actually used they must be constrained further.
4156 RuntimeType HhbcTranslator::rttFromLocation(const Location& loc) {
4157 Type t;
4158 SSATmp* val = nullptr;
4159 switch (loc.space) {
4160 case Location::Stack: {
4161 auto i = loc.offset;
4162 assert(i >= 0);
4163 if (i < m_irb->evalStack().size()) {
4164 val = top(DataTypeGeneric, i);
4165 t = val->type();
4166 } else {
4167 auto stackVal =
4168 getStackValue(m_irb->sp(),
4169 i - m_irb->evalStack().size() + m_irb->stackDeficit());
4170 val = stackVal.value;
4171 t = stackVal.knownType;
4172 if (!val && t == Type::StackElem) return RuntimeType(KindOfAny);
4174 } break;
4175 case Location::Local: {
4176 auto l = loc.offset;
4177 val = m_irb->localValue(l, DataTypeGeneric);
4178 t = val ? val->type() : m_irb->localType(l, DataTypeGeneric);
4179 } break;
4180 case Location::Litstr:
4181 return RuntimeType(curUnit()->lookupLitstrId(loc.offset));
4182 case Location::Litint:
4183 return RuntimeType(loc.offset);
4184 case Location::This:
4185 return RuntimeType(KindOfObject, KindOfNone, curFunc()->cls());
4186 case Location::Invalid:
4187 case Location::Iter:
4188 not_reached();
4191 assert(IMPLIES(val, val->type().equals(t)));
4192 if (val && val->isConst()) {
4193 // RuntimeType holds constant Bool, Int, Str, and Cls.
4194 if (val->isA(Type::Bool)) return RuntimeType(val->boolVal());
4195 if (val->isA(Type::Int)) return RuntimeType(val->intVal());
4196 if (val->isA(Type::Str)) return RuntimeType(val->strVal());
4197 if (val->isA(Type::Cls)) return RuntimeType(val->clsVal());
4200 return t.toRuntimeType();
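// Worked example (illustrative only): for mask = {1,0,1,1} padded with zeros
// and i = 0, the loop below ORs bit j of the result with bits[j] for j in
// [0, 64), yielding 0b1101 = 13. Callers pass i in multiples of 64 to pack
// each 64-bit chunk of the vector separately.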
4203 static uint64_t packBitVec(const std::vector<bool>& bits, unsigned i) {
4204 uint64_t retval = 0;
4205 assert(i % 64 == 0);
4206 assert(i < bits.size());
4207 while (i < bits.size()) {
4208 retval |= static_cast<uint64_t>(bits[i]) << (i % 64); // widen before shifting so shifts of 32+ bits are well-defined
4209 if ((++i % 64) == 0) {
4210 break;
4213 return retval;
4216 void HhbcTranslator::guardRefs(int64_t entryArDelta,
4217 const std::vector<bool>& mask,
4218 const std::vector<bool>& vals) {
4219 int32_t actRecOff = cellsToBytes(entryArDelta);
4220 SSATmp* funcPtr = gen(LdARFuncPtr, m_irb->sp(), cns(actRecOff));
4221 SSATmp* nParams = nullptr;
4223 for (unsigned i = 0; i < mask.size(); i += 64) {
4224 assert(i < vals.size());
4226 uint64_t mask64 = packBitVec(mask, i);
4227 if (mask64 == 0) {
4228 continue;
4231 if (i == 0) {
4232 nParams = cns(64);
4233 } else if (i == 64) {
4234 nParams = gen(LdRaw, RawMemData{RawMemData::FuncNumParams}, funcPtr);
4237 uint64_t vals64 = packBitVec(vals, i);
4238 gen(
4239 GuardRefs,
4240 funcPtr,
4241 nParams,
4242 cns(i),
4243 cns(mask64),
4244 cns(vals64)
4249 void HhbcTranslator::endGuards() {
4250 gen(EndGuards);
4253 void HhbcTranslator::emitVerifyTypeImpl(int32_t id) {
4254 bool isReturnType = (id == HPHP::TypeConstraint::ReturnId);
4255 if (isReturnType && !RuntimeOption::EvalCheckReturnTypeHints) return;
4257 auto const ldgblExit = makePseudoMainExit();
4258 auto func = curFunc();
4259 auto const& tc = isReturnType ? func->returnTypeConstraint()
4260 : func->params()[id].typeConstraint();
4261 auto* val = isReturnType ? topR() : ldLoc(id, ldgblExit, DataTypeSpecific);
4262 assert(val->type().isBoxed() || val->type().notBoxed());
4263 if (val->type().isBoxed()) {
4264 val = gen(LdRef, val->type().innerType(), makeExit(), val);
4265 m_irb->constrainValue(val, DataTypeSpecific);
4267 auto const valType = val->type();
4269 if (!valType.isKnownDataType()) {
4270 if (!isReturnType) {
4271 // This is supposed to be impossible, but it does happen in a rare case
4272 // with the legacy region selector. Until it's figured out, punt in
4273 // release builds. t3412704
4274 assert_log(false,
4275 [&] {
4276 return folly::format("Bad type {} for local {}:\n\n{}\n",
4277 valType, id, m_irb->unit().toString()).str();
4280 emitInterpOne(0);
4281 return;
4284 if (tc.isTypeVar()) return;
4285 if (tc.isNullable() && valType.subtypeOf(Type::InitNull)) return;
4287 if (!isReturnType && tc.isArray() && !tc.isSoft() && !func->mustBeRef(id) &&
4288 valType <= Type::Obj) {
4289 PUNT(VerifyParamType-collectionToArray);
4290 return;
4292 if (tc.isCallable()) {
4293 if (isReturnType) {
4294 gen(VerifyRetCallable, makeCatch(), val);
4295 } else {
4296 gen(VerifyParamCallable, makeCatch(), val, cns(id));
4298 return;
4301 // For non-object guards, we rely on what we know from the tracelet
4302 // guards and never have to do runtime checks.
4303 if (!tc.isObjectOrTypeAlias()) {
4304 if (!tc.checkPrimitive(valType.toDataType())) {
4305 if (isReturnType) {
4306 gen(VerifyRetFail, makeCatch(), val);
4307 } else {
4308 gen(VerifyParamFail, makeCatch(), cns(id));
4311 return;
4313 // If val is not an object, it still might pass the type constraint
4314 // if the constraint is a typedef. For now we just interp that case.
4315 auto const typeName = tc.typeName();
4316 if (valType <= Type::Arr && interface_supports_array(typeName)) {
4317 return;
4319 if (valType <= Type::Str && interface_supports_string(typeName)) {
4320 return;
4322 if (valType <= Type::Int && interface_supports_int(typeName)) {
4323 return;
4325 if (valType <= Type::Dbl && interface_supports_double(typeName)) {
4326 return;
4328 if (!(valType <= Type::Obj)) {
4329 if (tc.isObjectOrTypeAlias()
4330 && RuntimeOption::RepoAuthoritative
4331 && !tc.isCallable()
4332 && tc.isPrecise()) {
4333 auto const td = tc.namedEntity()->getCachedTypeAlias();
4334 if (tc.namedEntity()->isPersistentTypeAlias() && td) {
4335 if ((td->nullable && valType <= Type::Null)
4336 || td->kind == KindOfAny
4337 || equivDataTypes(td->kind, valType.toDataType())) {
4338 m_irb->constrainValue(val, TypeConstraint(DataTypeSpecific));
4339 return;
4343 emitInterpOne(0);
4344 return;
4347 const StringData* clsName;
4348 const Class* knownConstraint = nullptr;
4349 if (!tc.isSelf() && !tc.isParent()) {
4350 clsName = tc.typeName();
4351 knownConstraint = Unit::lookupClass(clsName);
4352 } else {
4353 if (tc.isSelf()) {
4354 tc.selfToClass(curFunc(), &knownConstraint);
4355 } else if (tc.isParent()) {
4356 tc.parentToClass(curFunc(), &knownConstraint);
4358 if (knownConstraint) {
4359 clsName = knownConstraint->preClass()->name();
4360 } else {
4361 // The hint was self or parent and there's no corresponding
4362 // class for the current func. This typehint will always fail.
4363 if (isReturnType) {
4364 gen(VerifyRetFail, makeCatch(), val);
4365 } else {
4366 gen(VerifyParamFail, makeCatch(), cns(id));
4368 return;
4371 assert(clsName);
4373 // We can only burn in the Class* if it's unique or in the
4374 // inheritance hierarchy of our context. It's ok if the class isn't
4375 // defined yet - all paths below are tolerant of a null constraint.
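  // For example (a sketch of the intent): a constraint naming a unique class,
  // or one in the inheritance hierarchy of the current context, can be burned
  // in as cns(knownConstraint); anything else falls back to the
  // LdClsCachedSafe lookup below.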
4376 if (!classIsUniqueOrCtxParent(knownConstraint)) knownConstraint = nullptr;
4379 * If the local is a specialized object type and we don't have to constrain a
4380 * guard to get it, we can avoid emitting runtime checks if we know the thing
4381 * would pass. If we don't know, we still have to emit them because valType
4382 * might be a subtype of its specialized object type.
4384 if (valType < Type::Obj) {
4385 auto const cls = valType.getClass();
4386 if (!m_irb->constrainValue(val, TypeConstraint(cls).setWeak()) &&
4387 ((knownConstraint && cls->classof(knownConstraint)) ||
4388 cls->name()->isame(clsName))) {
4389 return;
4393 InstanceBits::init();
4394 bool haveBit = InstanceBits::lookup(clsName) != 0;
4395 SSATmp* constraint = knownConstraint ? cns(knownConstraint)
4396 : gen(LdClsCachedSafe, cns(clsName));
4397 SSATmp* objClass = gen(LdObjClass, val);
4398 if (haveBit || classIsUniqueNormalClass(knownConstraint)) {
4399 SSATmp* isInstance = haveBit
4400 ? gen(InstanceOfBitmask, objClass, cns(clsName))
4401 : gen(ExtendsClass, objClass, constraint);
4402 m_irb->ifThen([&](Block* taken) {
4403 gen(JmpZero, taken, isInstance);
4405 [&] { // taken: the param type does not match
4406 m_irb->hint(Block::Hint::Unlikely);
4407 if (isReturnType) {
4408 gen(VerifyRetFail, makeCatch(), val);
4409 } else {
4410 gen(VerifyParamFail, makeCatch(), cns(id));
4414 } else {
4415 if (isReturnType) {
4416 gen(VerifyRetCls, makeCatch(), objClass, constraint,
4417 cns(uintptr_t(&tc)), val);
4418 } else {
4419 gen(VerifyParamCls, makeCatch(), objClass, constraint,
4420 cns(uintptr_t(&tc)), cns(id));
4425 void HhbcTranslator::emitVerifyRetTypeC() {
4426 emitVerifyTypeImpl(HPHP::TypeConstraint::ReturnId);
4429 void HhbcTranslator::emitVerifyRetTypeV() {
4430 emitVerifyTypeImpl(HPHP::TypeConstraint::ReturnId);
4433 void HhbcTranslator::emitVerifyParamType(int32_t paramId) {
4434 emitVerifyTypeImpl(paramId);
4437 const StaticString s_WaitHandle("WaitHandle");
4439 void HhbcTranslator::emitInstanceOfD(int classNameStrId) {
4440 const StringData* className = lookupStringId(classNameStrId);
4441 SSATmp* src = popC();
4444 * InstanceOfD is always false if it's not an object.
4446 * We're prepared to generate translations for known non-object
4447 * types, but if it's Gen/Cell we're going to PUNT, because it's more
4448 * natural to translate that case with control flow. TODO(#2020251)
4450 if (Type::Obj.strictSubtypeOf(src->type())) {
4451 PUNT(InstanceOfD_MaybeObj);
4453 if (!src->isA(Type::Obj)) {
4454 bool res = ((src->isA(Type::Arr) && interface_supports_array(className))) ||
4455 (src->isA(Type::Str) && interface_supports_string(className)) ||
4456 (src->isA(Type::Int) && interface_supports_int(className)) ||
4457 (src->isA(Type::Dbl) && interface_supports_double(className));
4458 push(cns(res));
4459 gen(DecRef, src);
4460 return;
4463 if (s_WaitHandle.get()->isame(className)) {
4464 push(gen(IsWaitHandle, src));
4465 gen(DecRef, src);
4466 return;
4469 SSATmp* objClass = gen(LdObjClass, src);
4470 SSATmp* ssaClassName = cns(className);
4472 InstanceBits::init();
4473 const bool haveBit = InstanceBits::lookup(className) != 0;
4475 Class* const maybeCls = Unit::lookupUniqueClass(className);
4476 const bool isNormalClass = classIsUniqueNormalClass(maybeCls);
4477 const bool isUnique = classIsUnique(maybeCls);
4480 * If the class is a unique interface, we can just hit the class's
4481 * interfaces map and call it a day.
4483 if (!haveBit && classIsUniqueInterface(maybeCls)) {
4484 push(gen(InstanceOfIface, objClass, ssaClassName));
4485 gen(DecRef, src);
4486 return;
4490 * If the class is unique or a parent of the current context, we
4491 * don't need to load it out of RDS because it must already exist
4492 * and be defined.
4494 * Otherwise, we only use LdClsCachedSafe---instanceof with an
4495 * undefined class doesn't invoke autoload.
4497 SSATmp* checkClass =
4498 isUnique || (maybeCls && curClass() && curClass()->classof(maybeCls))
4499 ? cns(maybeCls)
4500 : gen(LdClsCachedSafe, ssaClassName);
4502 push(
4503 haveBit ? gen(InstanceOfBitmask, objClass, ssaClassName)
4504 : isUnique && isNormalClass ? gen(ExtendsClass, objClass, checkClass)
4505 : gen(InstanceOf, objClass, checkClass)
4507 gen(DecRef, src);
4510 void HhbcTranslator::emitInstanceOf() {
4511 auto const t1 = popC();
4512 auto const t2 = popC(); // t2 instanceof t1
4514 if (t1->isA(Type::Obj) && t2->isA(Type::Obj)) {
4515 auto const c2 = gen(LdObjClass, t2);
4516 auto const c1 = gen(LdObjClass, t1);
4517 push(gen(InstanceOf, c2, c1));
4518 gen(DecRef, t2);
4519 gen(DecRef, t1);
4520 return;
4523 if (!t1->isA(Type::Str)) PUNT(InstanceOf-NotStr);
4525 if (t2->isA(Type::Obj)) {
4526 auto const rds = gen(LookupClsRDSHandle, t1);
4527 auto const c1 = gen(DerefClsRDSHandle, rds);
4528 auto const c2 = gen(LdObjClass, t2);
4529 push(gen(InstanceOf, c2, c1));
4530 gen(DecRef, t2);
4531 gen(DecRef, t1);
4532 return;
4535 push(
4536 t2->isA(Type::Arr) ? gen(InterfaceSupportsArr, t1) :
4537 t2->isA(Type::Int) ? gen(InterfaceSupportsInt, t1) :
4538 t2->isA(Type::Str) ? gen(InterfaceSupportsStr, t1) :
4539 t2->isA(Type::Dbl) ? gen(InterfaceSupportsDbl, t1) :
4540 cns(false)
4542 gen(DecRef, t2);
4543 gen(DecRef, t1);
4546 void HhbcTranslator::emitCastArray() {
4547 // Turns the castArray BC operation into a type specialized
4548 // IR operation. The IR operation might end up being simplified
4549 // into a constant, but if not, it simply turns into a helper
4550 // call when translated to machine code. The main benefit from
4551 // separate IR instructions is that they can have different flags,
4552 // principally to distinguish the instructions that (may) hold on to a
4553 // reference to the argument from those that do not.
4555 // In the future, if this instruction occurs in a hot trace,
4556 // it might be better to expand it into a series of primitive
4557 // IR instructions so that the object allocation is exposed to
4558 // the optimizer and becomes eligible for removal if it does not
4559 // escape the trace.
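  // For instance (informal): casting a constant Int may be simplified away
  // entirely, while the Obj and generic Cell cases below keep a catch block
  // because the conversion can throw and reenter the VM.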
4561 auto catchBlock = makeCatch();
4562 SSATmp* src = popC();
4563 if (src->isA(Type::Arr)) {
4564 push(src);
4565 } else if (src->isA(Type::Null)) {
4566 push(cns(staticEmptyArray()));
4567 } else if (src->isA(Type::Bool)) {
4568 push(gen(ConvBoolToArr, src));
4569 } else if (src->isA(Type::Dbl)) {
4570 push(gen(ConvDblToArr, src));
4571 } else if (src->isA(Type::Int)) {
4572 push(gen(ConvIntToArr, src));
4573 } else if (src->isA(Type::Str)) {
4574 push(gen(ConvStrToArr, src));
4575 } else if (src->isA(Type::Obj)) {
4576 push(gen(ConvObjToArr, catchBlock, src));
4577 } else {
4578 push(gen(ConvCellToArr, catchBlock, src));
4582 void HhbcTranslator::emitCastBool() {
4583 auto const src = popC();
4584 push(gen(ConvCellToBool, src));
4585 gen(DecRef, src);
4588 void HhbcTranslator::emitCastDouble() {
4589 auto const catchBlock = makeCatch();
4590 auto const src = popC();
4591 push(gen(ConvCellToDbl, catchBlock, src));
4592 gen(DecRef, src);
4595 void HhbcTranslator::emitCastInt() {
4596 auto const catchBlock = makeCatch();
4597 auto const src = popC();
4598 push(gen(ConvCellToInt, catchBlock, src));
4599 gen(DecRef, src);
4602 void HhbcTranslator::emitCastObject() {
4603 SSATmp* src = popC();
4604 if (src->isA(Type::Obj)) {
4605 push(src);
4606 } else {
4607 push(gen(ConvCellToObj, src));
4611 void HhbcTranslator::emitCastString() {
4612 auto const catchBlock = makeCatch();
4613 auto const src = popC();
4614 push(gen(ConvCellToStr, catchBlock, src));
4615 gen(DecRef, src);
4618 static bool isSupportedAGet(SSATmp* classSrc) {
4619 return (classSrc->isA(Type::Obj) || classSrc->isA(Type::Str));
4622 void HhbcTranslator::emitAGet(SSATmp* classSrc, Block* catchBlock) {
4623 if (classSrc->isA(Type::Str)) {
4624 push(gen(LdCls, catchBlock, classSrc, cns(curClass())));
4625 } else if (classSrc->isA(Type::Obj)) {
4626 push(gen(LdObjClass, classSrc));
4627 } else {
4628 not_reached();
4632 void HhbcTranslator::emitAGetC() {
4633 auto const name = topC();
4634 if (isSupportedAGet(name)) {
4635 auto catchBlock = makeCatch();
4636 popC();
4637 emitAGet(name, catchBlock);
4638 gen(DecRef, name);
4639 } else {
4640 emitInterpOne(Type::Cls, 1);
4644 void HhbcTranslator::emitAGetL(int id) {
4645 auto const ldrefExit = makeExit();
4646 auto const ldgblExit = makePseudoMainExit();
4648 auto const src = ldLocInner(id, ldrefExit, ldgblExit, DataTypeSpecific);
4649 if (isSupportedAGet(src)) {
4650 emitAGet(src, makeCatch());
4651 } else {
4652 PUNT(AGetL); // need to teach interpone about local uses
4656 void HhbcTranslator::emitBindMem(SSATmp* ptr, SSATmp* src) {
4657 SSATmp* prevValue = gen(LdMem, ptr->type().deref(), ptr, cns(0));
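  // Ordering note (an inference from the code, not an authoritative claim):
  // the new value is pushed and stored before the old value is decref'd, so
  // if the DecRef below runs a destructor that reenters the VM, both the eval
  // stack and the memory slot already hold the new value.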
4659 pushIncRef(src);
4660 gen(StMem, ptr, cns(0), src);
4661 gen(DecRef, prevValue);
4664 void HhbcTranslator::emitEmptyMem(SSATmp* ptr) {
4665 SSATmp* ld = gen(LdMem, Type::Cell, gen(UnboxPtr, ptr), cns(0));
4666 push(gen(XorBool, gen(ConvCellToBool, ld), cns(true)));
4669 void HhbcTranslator::destroyName(SSATmp* name) {
4670 assert(name == topC());
4671 popDecRef(name->type());
4674 SSATmp* HhbcTranslator::ldClsPropAddr(Block* catchBlock,
4675 SSATmp* ssaCls,
4676 SSATmp* ssaName,
4677 bool raise) {
4679 * We can use LdClsPropAddrKnown if either we know which property it is and
4680 * that it is visible && accessible, or we know it is a property on this
4681 * class itself.
4683 bool const sPropKnown = [&] {
4684 if (!ssaName->isConst()) return false;
4685 auto const propName = ssaName->strVal();
4687 if (!ssaCls->isConst()) return false;
4688 auto const cls = ssaCls->clsVal();
4689 if (!classIsPersistentOrCtxParent(cls)) return false;
4691 bool visible, accessible;
4692 cls->findSProp(curClass(), propName, visible, accessible);
4693 return visible && accessible;
4694 }();
4696 if (sPropKnown) {
4697 auto const cls = ssaCls->clsVal();
4699 auto const repoTy = [&] {
4700 if (!RuntimeOption::RepoAuthoritative) return RepoAuthType{};
4701 auto const slot = cls->lookupSProp(ssaName->strVal());
4702 return cls->staticPropRepoAuthType(slot);
4703 }();
4705 auto const ptrTy = convertToType(repoTy).ptr();
4707 emitInitSProps(cls, catchBlock);
4708 return gen(LdClsPropAddrKnown, ptrTy, ssaCls, ssaName);
4711 if (raise) {
4712 return gen(LdClsPropAddrOrRaise, catchBlock,
4713 ssaCls, ssaName, cns(curClass()));
4714 } else {
4715 return gen(LdClsPropAddrOrNull, catchBlock,
4716 ssaCls, ssaName, cns(curClass()));
4720 void HhbcTranslator::emitCGetS() {
4721 auto const catchBlock = makeCatch();
4722 auto const ssaPropName = topC(1);
4724 if (!ssaPropName->isA(Type::Str)) {
4725 PUNT(CGetS-PropNameNotString);
4728 auto const ssaCls = popA();
4729 auto const propAddr = ldClsPropAddr(catchBlock, ssaCls, ssaPropName, true);
4730 auto const unboxed = gen(UnboxPtr, propAddr);
4731 auto const ldMem = gen(LdMem, unboxed->type().deref(), unboxed, cns(0));
4733 destroyName(ssaPropName);
4734 pushIncRef(ldMem);
4737 void HhbcTranslator::emitSetS() {
4738 auto const catchBlock = makeCatch();
4739 auto const ssaPropName = topC(2);
4741 if (!ssaPropName->isA(Type::Str)) {
4742 PUNT(SetS-PropNameNotString);
4745 auto const value = popC(DataTypeCountness);
4746 auto const ssaCls = popA();
4747 auto const propAddr = ldClsPropAddr(catchBlock, ssaCls, ssaPropName, true);
4748 auto const ptr = gen(UnboxPtr, propAddr);
4750 destroyName(ssaPropName);
4751 emitBindMem(ptr, value);
4754 void HhbcTranslator::emitVGetS() {
4755 auto const catchBlock = makeCatch();
4756 auto const ssaPropName = topC(1);
4758 if (!ssaPropName->isA(Type::Str)) {
4759 PUNT(VGetS-PropNameNotString);
4762 auto const ssaCls = popA();
4763 auto const propAddr = ldClsPropAddr(catchBlock, ssaCls, ssaPropName, true);
4765 destroyName(ssaPropName);
4766 pushIncRef(gen(LdMem, Type::BoxedCell, gen(BoxPtr, propAddr), cns(0)));
4769 void HhbcTranslator::emitBindS() {
4770 auto const catchBlock = makeCatch();
4771 auto const ssaPropName = topC(2);
4773 if (!ssaPropName->isA(Type::Str)) {
4774 PUNT(BindS-PropNameNotString);
4777 auto const value = popV();
4778 auto const ssaCls = popA();
4779 auto const propAddr = ldClsPropAddr(catchBlock, ssaCls, ssaPropName, true);
4781 destroyName(ssaPropName);
4782 emitBindMem(propAddr, value);
4785 void HhbcTranslator::emitIssetS() {
4786 auto const catchBlock = makeCatch();
4788 auto const ssaPropName = topC(1);
4789 if (!ssaPropName->isA(Type::Str)) {
4790 PUNT(IssetS-PropNameNotString);
4792 auto const ssaCls = popA();
4794 auto const ret = m_irb->cond(
4796 [&] (Block* taken) {
4797 auto propAddr = ldClsPropAddr(catchBlock, ssaCls, ssaPropName, false);
4798 return gen(CheckNonNull, taken, propAddr);
4800 [&] (SSATmp* ptr) { // Next: property or global exists
4801 return gen(IsNTypeMem, Type::Null, gen(UnboxPtr, ptr));
4803 [&] { // Taken: LdClsPropAddr* returned Nullptr because it isn't defined
4804 return cns(false);
4807 destroyName(ssaPropName);
4808 push(ret);
4811 void HhbcTranslator::emitEmptyS() {
4812 auto const catchBlock = makeCatch();
4814 auto const ssaPropName = topC(1);
4815 if (!ssaPropName->isA(Type::Str)) {
4816 PUNT(EmptyS-PropNameNotString);
4819 auto const ssaCls = popA();
4820 auto const ret = m_irb->cond(
4822 [&] (Block* taken) {
4823 auto propAddr = ldClsPropAddr(catchBlock, ssaCls, ssaPropName, false);
4824 return gen(CheckNonNull, taken, propAddr);
4826 [&] (SSATmp* ptr) {
4827 auto const unbox = gen(UnboxPtr, ptr);
4828 auto const val = gen(LdMem, unbox->type().deref(), unbox, cns(0));
4829 return gen(XorBool, gen(ConvCellToBool, val), cns(true));
4831 [&] { // Taken: LdClsPropAddr* returned Nullptr because it isn't defined
4832 return cns(true);
4835 destroyName(ssaPropName);
4836 push(ret);
4839 void HhbcTranslator::emitCGetG() {
4840 auto const exit = makeExitSlow();
4841 auto const name = topC();
4842 if (!name->isA(Type::Str)) PUNT(CGetG-NonStrName);
4843 auto const ptr = gen(LdGblAddr, exit, name);
4844 destroyName(name);
4845 pushIncRef(gen(LdMem, Type::Cell, gen(UnboxPtr, ptr), cns(0)));
4848 void HhbcTranslator::emitVGetG() {
4849 auto const name = topC();
4850 if (!name->isA(Type::Str)) PUNT(VGetG-NonStrName);
4851 auto const ptr = gen(LdGblAddrDef, name);
4852 destroyName(name);
4853 pushIncRef(gen(LdMem, Type::BoxedCell, gen(BoxPtr, ptr), cns(0)));
4856 void HhbcTranslator::emitBindG() {
4857 auto const name = topC(1);
4858 if (!name->isA(Type::Str)) PUNT(BindG-NameNotStr);
4859 auto const box = popV();
4860 auto const ptr = gen(LdGblAddrDef, name);
4861 destroyName(name);
4862 emitBindMem(ptr, box);
4865 void HhbcTranslator::emitSetG() {
4866 auto const name = topC(1);
4867 if (!name->isA(Type::Str)) PUNT(SetG-NameNotStr);
4868 auto const value = popC(DataTypeCountness);
4869 auto const unboxed = gen(UnboxPtr, gen(LdGblAddrDef, name));
4870 destroyName(name);
4871 emitBindMem(unboxed, value);
4874 void HhbcTranslator::emitIssetG() {
4875 auto const name = topC(0);
4876 if (!name->isA(Type::Str)) PUNT(IssetG-NameNotStr);
4878 auto const ret = m_irb->cond(
4880 [&] (Block* taken) {
4881 return gen(LdGblAddr, taken, name);
4883 [&] (SSATmp* ptr) { // Next: global exists
4884 return gen(IsNTypeMem, Type::Null, gen(UnboxPtr, ptr));
4886 [&] { // Taken: global doesn't exist
4887 return cns(false);
4889 destroyName(name);
4890 push(ret);
4893 void HhbcTranslator::emitEmptyG() {
4894 auto const name = topC();
4895 if (!name->isA(Type::Str)) PUNT(EmptyG-NameNotStr);
4897 auto const ret = m_irb->cond(
4899 [&] (Block* taken) {
4900 return gen(LdGblAddr, taken, name);
4902 [&] (SSATmp* ptr) { // Next: global exists
4903 auto const unboxed = gen(UnboxPtr, ptr);
4904 auto const val = gen(LdMem, Type::Cell, unboxed, cns(0));
4905 return gen(XorBool, gen(ConvCellToBool, val), cns(true));
4907 [&] { // Taken: global doesn't exist
4908 return cns(true);
4910 destroyName(name);
4911 push(ret);
4914 void HhbcTranslator::emitBinaryBitOp(Op op) {
4915 Type type2 = topC(0)->type();
4916 Type type1 = topC(1)->type();
4918 if (!areBinaryArithTypesSupported(op, type1, type2)) {
4919 PUNT(BinaryBitOp-Unsupported);
4920 return;
4923 SSATmp* src2 = promoteBool(popC());
4924 SSATmp* src1 = promoteBool(popC());
4925 push(gen(bitOp(op), src1, src2));
4928 void HhbcTranslator::emitBinaryArith(Op op) {
4929 Type type2 = topC(0)->type();
4930 Type type1 = topC(1)->type();
4932 if (!areBinaryArithTypesSupported(op, type1, type2)) {
4933 // The result is either an int or a dbl, but we can't tell which, so punt.
4934 PUNT(BinaryArith-Unsupported);
4935 return;
4938 auto spillValues = peekSpillValues();
4939 SSATmp* src2 = promoteBool(popC());
4940 SSATmp* src1 = promoteBool(popC());
4941 Opcode opc = promoteBinaryDoubles(op, src1, src2);
4943 if (opc == AddIntO || opc == SubIntO || opc == MulIntO) {
4944 assert(src1->isA(Type::Int) && src2->isA(Type::Int));
4946 auto const exit = makeExitImpl(
4947 bcOff(),
4948 ExitFlag::Interp,
4949 spillValues,
4950 CustomExit{}
4953 push(gen(opc, exit, src1, src2));
4954 } else {
4955 push(gen(opc, src1, src2));
4959 void HhbcTranslator::emitNot() {
4960 SSATmp* src = popC();
4961 push(gen(XorBool, gen(ConvCellToBool, src), cns(true)));
4962 gen(DecRef, src);
4965 void HhbcTranslator::emitFloor() {
4966 // need SSE 4.1 support to use roundsd
4967 if (!folly::CpuId().sse41()) {
4968 PUNT(Floor);
4971 auto catchBlock = makeCatch();
4972 auto val = popC();
4973 auto dblVal = gen(ConvCellToDbl, catchBlock, val);
4974 gen(DecRef, val);
4975 push(gen(Floor, dblVal));
4978 void HhbcTranslator::emitCeil() {
4979 // need SSE 4.1 support to use roundsd
4980 if (!folly::CpuId().sse41()) {
4981 PUNT(Ceil);
4984 auto catchBlock = makeCatch();
4985 auto val = popC();
4986 auto dblVal = gen(ConvCellToDbl, catchBlock, val);
4987 gen(DecRef, val);
4988 push(gen(Ceil, dblVal));
4991 void HhbcTranslator::emitCheckProp(Id propId) {
4992 StringData* propName = lookupStringId(propId);
4994 auto* cctx = gen(LdCctx, m_irb->fp());
4995 auto* cls = gen(LdClsCtx, cctx);
4996 auto* propInitVec = gen(LdClsInitData, cls);
4998 auto* ctx = curClass();
4999 auto idx = ctx->lookupDeclProp(propName);
5001 auto* curVal = gen(LdElem, propInitVec, cns(idx * sizeof(TypedValue)));
5002 push(gen(IsNType, Type::Uninit, curVal));
5005 void HhbcTranslator::emitInitProp(Id propId, InitPropOp op) {
5006 StringData* propName = lookupStringId(propId);
5007 SSATmp* val = popC();
5009 auto* ctx = curClass();
5011 SSATmp* base;
5012 Slot idx = 0;
5014 switch(op) {
5015 case InitPropOp::Static:
5016 // For sinit, the context class is always the same as the late-bound
5017 // class, so we can just use curClass().
5018 base = gen(LdClsPropAddrKnown, Type::PtrToCell, cns(ctx), cns(propName));
5019 break;
5021 case InitPropOp::NonStatic: {
5022 // The above is not the case for pinit, so we need to load.
5023 auto* cctx = gen(LdCctx, m_irb->fp());
5024 auto* cls = gen(LdClsCtx, cctx);
5026 base = gen(LdClsInitData, cls);
5027 idx = ctx->lookupDeclProp(propName);
5028 } break;
5031 gen(StElem, base, cns(idx * sizeof(TypedValue)), val);
5034 void HhbcTranslator::emitSilence(Id localId, unsigned char ucsubop) {
5035 SilenceOp subop = static_cast<SilenceOp>(ucsubop);
5036 switch (subop) {
5037 case SilenceOp::Start: {
5038 // We assume that whatever is in the local is dead and doesn't need to be
5039 // refcounted before being overwritten.
5040 gen(AssertLoc, Type::Uncounted, LocalId(localId), m_irb->fp());
5041 auto level = gen(ZeroErrorLevel);
5042 gen(StLoc, LocalId(localId), m_irb->fp(), level);
5043 break;
5045 case SilenceOp::End: {
5046 gen(AssertLoc, Type::Int, LocalId(localId), m_irb->fp());
5047 auto level = ldLoc(localId, makeExit(), DataTypeGeneric);
5048 gen(RestoreErrorLevel, level);
5049 break;
5056 * Note: this is kept separate from convertToType(RepoAuthType) for now,
5057 * just because we don't want to enable every single type for
5058 * assertions yet.
5060 * (Some of them currently regress performance, presumably because the
5061 * IR doesn't always handle the additional type information very well.
5062 * It is possibly a compile-time slowdown only, but we haven't
5063 * investigated yet.)
5065 folly::Optional<Type> HhbcTranslator::ratToAssertType(RepoAuthType rat) const {
5066 using T = RepoAuthType::Tag;
5067 switch (rat.tag()) {
5068 case T::Uninit: return Type::Uninit;
5069 case T::InitNull: return Type::InitNull;
5070 case T::Int: return Type::Int;
5071 case T::Dbl: return Type::Dbl;
5072 case T::Res: return Type::Res;
5073 case T::Null: return Type::Null;
5074 case T::Bool: return Type::Bool;
5075 case T::Str: return Type::Str;
5076 case T::Obj: return Type::Obj;
5077 case T::SStr: return Type::StaticStr;
5079 // These aren't enabled yet:
5080 case T::OptInt:
5081 case T::OptObj:
5082 case T::OptDbl:
5083 case T::OptBool:
5084 case T::OptSStr:
5085 case T::OptStr:
5086 case T::OptRes:
5087 return folly::none;
5089 case T::OptSArr:
5090 case T::OptArr:
5091 // TODO(#4205897): optional array types.
5092 return folly::none;
5094 case T::SArr:
5095 if (auto const arr = rat.array()) {
5096 return Type::StaticArr.specialize(arr);
5098 return Type::StaticArr;
5099 case T::Arr:
5100 if (auto const arr = rat.array()) {
5101 return Type::Arr.specialize(arr);
5103 return Type::Arr;
5105 case T::OptExactObj:
5106 case T::OptSubObj:
5107 case T::ExactObj:
5108 case T::SubObj:
5110 auto ty = [&] {
5111 auto const cls = Unit::lookupUniqueClass(rat.clsName());
5112 return classIsPersistentOrCtxParent(cls)
5113 ? Type::Obj.specialize(cls)
5114 : Type::Obj;
5115 }();
5116 if (rat.tag() == T::OptExactObj || rat.tag() == T::OptSubObj) {
5117 ty = ty | Type::InitNull;
5119 return ty;
5122 // We always know this at JIT time right now.
5123 case T::Cell:
5124 case T::Ref:
5125 return folly::none;
5127 case T::InitGen:
5128 // Should ideally be able to remove Uninit here.
5129 return folly::none;
5130 case T::Gen:
5131 return folly::none;
5133 // The JIT can't currently handle the exact information in these
5134 // type assertions in some cases:
5135 case T::InitUnc: return folly::none;
5136 case T::Unc: return folly::none;
5137 case T::InitCell: return Type::Cell; // - Type::Uninit
5139 not_reached();
5142 void HhbcTranslator::emitAssertRATL(int32_t loc, RepoAuthType rat) {
5143 if (auto const t = ratToAssertType(rat)) {
5144 assertTypeLocal(loc, *t);
5148 void HhbcTranslator::emitAssertRATStk(int32_t offset, RepoAuthType rat) {
5149 if (auto const t = ratToAssertType(rat)) {
5150 assertTypeStack(offset, *t);
5154 void HhbcTranslator::emitAbs() {
5155 auto value = popC();
5157 if (value->isA(Type::Int)) {
5158 // compute integer absolute value ((src>>63) ^ src) - (src>>63)
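    // Worked example, assuming Shr is an arithmetic shift on Int (which this
    // identity relies on): for src = -5, src >> 63 is -1 (all ones),
    // (-1 ^ -5) is 4, and 4 - (-1) is 5; for src >= 0 the shifted mask is 0
    // and the expression reduces to src.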
5159 auto t1 = gen(Shr, value, cns(63));
5160 auto t2 = gen(XorInt, t1, value);
5161 push(gen(SubInt, t2, t1));
5162 return;
5165 if (value->isA(Type::Dbl)) {
5166 push(gen(AbsDbl, value));
5167 return;
5170 if (value->isA(Type::Arr)) {
5171 gen(DecRef, value);
5172 push(cns(false));
5173 return;
5176 PUNT(Abs);
5179 #define AOP(OP, OPI, OPD) \
5180 void HhbcTranslator::emit ## OP() { emitBinaryArith(Op::OP); }
5181 BINARY_ARITH
5182 #undef AOP
5184 #define BOP(OP, OPI) \
5185 void HhbcTranslator::emit ## OP() { emitBinaryBitOp(Op::OP); }
5186 BINARY_BITOP
5187 #undef BOP
5189 void HhbcTranslator::emitDiv() {
5190 auto divisorType = topC(0)->type();
5191 auto dividendType = topC(1)->type();
5193 auto isNumeric = [&] (Type type) {
5194 return type.subtypeOfAny(Type::Int, Type::Dbl, Type::Bool);
5197 // not going to bother with string division etc.
5198 if (!isNumeric(divisorType) || !isNumeric(dividendType)) {
5199 emitInterpOne(Type::UncountedInit, 2);
5200 return;
5203 auto divisor = topC(0);
5204 auto dividend = topC(1);
5206 // we can't codegen this but we may be able to special case it away
5207 if (!divisor->isA(Type::Dbl) && !dividend->isA(Type::Dbl)) {
5208 // TODO(#2570625): support integer-integer division, move this to simplifier:
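    // Sketch of the constant cases handled below (illustrative): a literal
    // divisor of 0 becomes a raised warning plus a pushed false; otherwise
    // constant operands fold to an Int when the division is exact, and to a
    // Dbl when it is not (or when the dividend is LLONG_MIN).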
5209 if (divisor->isConst()) {
5210 int64_t divisorVal;
5211 if (divisor->isA(Type::Int)) {
5212 divisorVal = divisor->intVal();
5213 } else {
5214 assert(divisor->isA(Type::Bool));
5215 divisorVal = divisor->boolVal();
5218 if (divisorVal == 0) {
5219 auto catchBlock = makeCatch();
5220 popC();
5221 popC();
5222 gen(RaiseWarning, catchBlock,
5223 cns(makeStaticString(Strings::DIVISION_BY_ZERO)));
5224 push(cns(false));
5225 return;
5228 if (dividend->isConst()) {
5229 int64_t dividendVal;
5230 if (dividend->isA(Type::Int)) {
5231 dividendVal = dividend->intVal();
5232 } else {
5233 assert(dividend->isA(Type::Bool));
5234 dividendVal = dividend->boolVal();
5236 popC();
5237 popC();
5238 if (dividendVal == LLONG_MIN || dividendVal % divisorVal) {
5239 push(cns((double)dividendVal / divisorVal));
5240 } else {
5241 push(cns(dividendVal / divisorVal));
5243 return;
5245 /* fall through */
5247 emitInterpOne(Type::UncountedInit, 2);
5248 return;
5251 auto make_double = [&] (SSATmp* src) {
5252 if (src->isA(Type::Int)) {
5253 return gen(ConvIntToDbl, src);
5254 } else if (src->isA(Type::Bool)) {
5255 return gen(ConvBoolToDbl, src);
5257 assert(src->isA(Type::Dbl));
5258 return src;
5261 divisor = make_double(popC());
5262 dividend = make_double(popC());
5264 // on division by zero we spill false and exit with a warning
5265 auto exitSpillValues = peekSpillValues();
5266 exitSpillValues.push_back(cns(false));
5268 auto const exit = makeExitWarn(nextBcOff(), exitSpillValues,
5269 makeStaticString(Strings::DIVISION_BY_ZERO));
5271 assert(divisor->isA(Type::Dbl) && dividend->isA(Type::Dbl));
5272 push(gen(DivDbl, exit, dividend, divisor));
5275 void HhbcTranslator::emitMod() {
5276 auto catchBlock1 = makeCatch();
5277 auto catchBlock2 = makeCatch();
5278 SSATmp* btr = popC();
5279 SSATmp* btl = popC();
5280 SSATmp* tr = gen(ConvCellToInt, catchBlock1, btr);
5281 SSATmp* tl = gen(ConvCellToInt, catchBlock2, btl);
5283 // We only want to decref btr and btl if the ConvCellToInt operation gave us
5284 // a new value back.
5285 if (tr != btr) gen(DecRef, btr);
5286 if (tl != btl) gen(DecRef, btl);
5287 // Exit path spills an additional false
5288 auto exitSpillValues = peekSpillValues();
5289 exitSpillValues.push_back(cns(false));
5291 // Generate an exit for the rare case that the divisor is zero: the exit
5292 // raises a division-by-zero warning, produces the boolean false, and
5293 // resumes after the Mod instruction.
5294 auto const exit = makeExitWarn(nextBcOff(), exitSpillValues,
5295 makeStaticString(Strings::DIVISION_BY_ZERO));
5296 gen(JmpZero, exit, tr);
5298 // We unfortunately need to special-case r = -1 here. In two's
5299 // complement, trying to divide INT_MIN by -1 will cause an integer
5300 // overflow.
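  // (On x86-64, for example, INT64_MIN % -1 raises a divide-error trap even
  // though the mathematical result is 0, hence both the constant check and
  // the dynamic check below.)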
5301 if (tr->isConst()) {
5302 // This whole block only exists so m_irb->cond doesn't get mad when one
5303 // of the branches gets optimized out due to constant folding.
5304 if (tr->intVal() == -1LL) {
5305 push(cns(0));
5306 } else if (tr->intVal() == 0) {
5307 // Mod by zero is undefined; don't emit Mod for it because that could
5308 // cause issues in the simplifier/codegen. This path should never be
5309 // reached anyway (with a constant zero divisor the JmpZero above always
5310 // exits); we just need to push something onto the stack.
5311 push(cns(false));
5312 } else {
5313 push(gen(Mod, tl, tr));
5315 return;
5318 // check for -1 (dynamic version)
5319 SSATmp *res = m_irb->cond(
5321 [&] (Block* taken) {
5322 SSATmp* negone = gen(Eq, tr, cns(-1));
5323 gen(JmpNZero, taken, negone);
5325 [&] {
5326 return gen(Mod, tl, tr);
5328 [&] {
5329 m_irb->hint(Block::Hint::Unlikely);
5330 return cns(0);
5332 push(res);
5335 void HhbcTranslator::emitPow() {
5336 emitInterpOne(Type::UncountedInit, 2);
5339 void HhbcTranslator::emitSqrt() {
5340 auto const srcType = topC()->type();
5341 if (srcType <= Type::Int) {
5342 auto const src = gen(ConvIntToDbl, popC());
5343 push(gen(Sqrt, src));
5344 return;
5347 if (srcType <= Type::Dbl) {
5348 auto const src = popC();
5349 push(gen(Sqrt, src));
5350 return;
5353 emitInterpOne(Type::UncountedInit, 1);
5356 void HhbcTranslator::emitBitNot() {
5357 auto const srcType = topC()->type();
5358 if (srcType <= Type::Int) {
5359 auto const src = popC();
5360 push(gen(XorInt, src, cns(-1)));
5361 return;
5364 if (srcType <= Type::Dbl) {
5365 auto const src = gen(ConvDblToInt, popC());
5366 push(gen(XorInt, src, cns(-1)));
5367 return;
5370 auto const resultType = srcType <= Type::Str ? Type::Str
5371 : srcType.needsReg() ? Type::Cell
5372 : Type::Int;
5373 emitInterpOne(resultType, 1);
5376 void HhbcTranslator::emitXor() {
5377 SSATmp* btr = popC();
5378 SSATmp* btl = popC();
5379 SSATmp* tr = gen(ConvCellToBool, btr);
5380 SSATmp* tl = gen(ConvCellToBool, btl);
5381 push(gen(XorBool, tl, tr));
5382 gen(DecRef, btl);
5383 gen(DecRef, btr);
5386 void HhbcTranslator::emitShl() {
5387 auto catch1 = makeCatch();
5388 auto catch2 = makeCatch();
5389 auto shiftAmount = popC();
5390 auto lhs = popC();
5392 auto lhsInt = gen(ConvCellToInt, catch1, lhs);
5393 auto shiftAmountInt = gen(ConvCellToInt, catch2, shiftAmount);
5395 push(gen(Shl, lhsInt, shiftAmountInt));
5396 gen(DecRef, lhs);
5397 gen(DecRef, shiftAmount);
5400 void HhbcTranslator::emitShr() {
5401 auto catch1 = makeCatch();
5402 auto catch2 = makeCatch();
5403 auto shiftAmount = popC();
5404 auto lhs = popC();
5406 auto lhsInt = gen(ConvCellToInt, catch1, lhs);
5407 auto shiftAmountInt = gen(ConvCellToInt, catch2, shiftAmount);
5409 push(gen(Shr, lhsInt, shiftAmountInt));
5410 gen(DecRef, lhs);
5411 gen(DecRef, shiftAmount);
5414 namespace {
5416 Type arithOpResult(Type t1, Type t2) {
5417 if (!t1.isKnownDataType() || !t2.isKnownDataType()) {
5418 return Type::Cell;
5421 auto both = t1 | t2;
5422 if (both.maybe(Type::Dbl)) return Type::Dbl;
5423 if (both.maybe(Type::Arr)) return Type::Arr;
5424 if (both.maybe(Type::Str)) return Type::Cell;
5425 return Type::Int;
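// A few concrete cases (illustrative): Int op Dbl is predicted as Dbl, two
// Arr operands predict Arr (PHP's + on arrays), and any operand that might be
// a Str degrades the prediction to Cell, presumably because a numeric string
// could make the result either Int or Dbl at runtime.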
5428 Type arithOpOverResult(Type t1, Type t2) {
5429 if (t1 <= Type::Int && t2 <= Type::Int) {
5430 return Type::Int | Type::Dbl;
5432 return arithOpResult(t1, t2);
5435 Type bitOpResult(Type t1, Type t2) {
5436 if (!t1.isKnownDataType() || !t2.isKnownDataType()) {
5437 return Type::Cell;
5440 auto both = t1 | t2;
5441 if (both <= Type::Str) return Type::Str;
5442 return Type::Int;
5445 Type setOpResult(Type locType, Type valType, SetOpOp op) {
5446 switch (op) {
5447 case SetOpOp::PlusEqual:
5448 case SetOpOp::MinusEqual:
5449 case SetOpOp::MulEqual: return arithOpResult(locType.unbox(), valType);
5450 case SetOpOp::PlusEqualO:
5451 case SetOpOp::MinusEqualO:
5452 case SetOpOp::MulEqualO: return arithOpOverResult(locType.unbox(), valType);
5453 case SetOpOp::ConcatEqual: return Type::Str;
5454 case SetOpOp::PowEqual:
5455 case SetOpOp::DivEqual:
5456 case SetOpOp::ModEqual: return Type::UncountedInit;
5457 case SetOpOp::AndEqual:
5458 case SetOpOp::OrEqual:
5459 case SetOpOp::XorEqual: return bitOpResult(locType.unbox(), valType);
5460 case SetOpOp::SlEqual:
5461 case SetOpOp::SrEqual: return Type::Int;
5463 not_reached();
5466 uint32_t localInputId(const NormalizedInstruction& inst) {
5467 switch (inst.op()) {
5468 case OpSetWithRefLM:
5469 case OpFPassL:
5470 return inst.imm[1].u_LA;
5472 default:
5473 return inst.imm[0].u_LA;
5479 folly::Optional<Type> HhbcTranslator::interpOutputType(
5480 const NormalizedInstruction& inst,
5481 folly::Optional<Type>& checkTypeType) const {
5482 using namespace JIT::InstrFlags;
5483 auto localType = [&]{
5484 auto locId = localInputId(inst);
5485 assert(locId >= 0 && locId < curFunc()->numLocals());
5486 return m_irb->localType(locId, DataTypeSpecific);
5488 auto boxed = [](Type t) {
5489 if (t.equals(Type::Gen)) return t;
5490 assert(t.isBoxed() || t.notBoxed());
5491 return t.isBoxed() ? t : boxType(t);
5494 if (inst.outputPredicted) return Type::Gen;
5496 auto outFlag = getInstrInfo(inst.op()).type;
5497 if (outFlag == OutFInputL) {
5498 outFlag = inst.preppedByRef ? OutVInputL : OutCInputL;
5499 } else if (outFlag == OutFInputR) {
5500 outFlag = inst.preppedByRef ? OutVInput : OutCInput;
5503 switch (outFlag) {
5504 case OutNull: return Type::InitNull;
5505 case OutNullUninit: return Type::Uninit;
5506 case OutString: return Type::Str;
5507 case OutStringImm: return Type::StaticStr;
5508 case OutDouble: return Type::Dbl;
5509 case OutIsTypeL:
5510 case OutBoolean:
5511 case OutPredBool:
5512 case OutBooleanImm: return Type::Bool;
5513 case OutInt64: return Type::Int;
5514 case OutArray: return Type::Arr;
5515 case OutArrayImm: return Type::Arr; // Should be StaticArr: t2124292
5516 case OutObject:
5517 case OutThisObject: return Type::Obj;
5518 case OutResource: return Type::Res;
5520 case OutFDesc: return folly::none;
5521 case OutUnknown: return Type::Gen;
5523 case OutPred:
5524 checkTypeType = inst.outPred;
5525 // Returning inst.outPred from this function would turn the CheckStk
5526 // after the InterpOne into a nop.
5527 return Type::Gen;
5529 case OutCns: return Type::Cell;
5530 case OutVUnknown: return Type::BoxedCell;
5532 case OutSameAsInput: return topType(0);
5533 case OutVInput: return boxed(topType(0));
5534 case OutVInputL: return boxed(localType());
5535 case OutFInputL:
5536 case OutFInputR: not_reached();
5538 case OutArith: return arithOpResult(topType(0), topType(1));
5539 case OutArithO: return arithOpOverResult(topType(0), topType(1));
5540 case OutBitOp:
5541 return bitOpResult(topType(0),
5542 inst.op() == HPHP::OpBitNot ? Type::Bottom
5543 : topType(1));
5544 case OutSetOp: return setOpResult(localType(), topType(0),
5545 SetOpOp(inst.imm[1].u_OA));
5546 case OutIncDec: {
5547 auto ty = localType().unbox();
5548 return ty <= Type::Dbl ? ty : Type::Cell;
5550 case OutStrlen:
5551 return topType(0) <= Type::Str ? Type::Int : Type::UncountedInit;
5552 case OutClassRef: return Type::Cls;
5553 case OutFPushCufSafe: return folly::none;
5555 case OutNone: return folly::none;
5557 case OutCInput: {
5558 auto ttype = topType(0);
5559 if (ttype.notBoxed()) return ttype;
5560 // Instructions that are OutCInput or OutCInputL can't push uninit or
5561 // a ref, so only specific inner types need to be checked.
5562 if (ttype.unbox().strictSubtypeOf(Type::InitCell)) {
5563 checkTypeType = ttype.unbox();
5565 return Type::Cell;
5568 case OutCInputL: {
5569 auto ltype = localType();
5570 if (ltype.notBoxed()) return ltype;
5571 if (ltype.unbox().strictSubtypeOf(Type::InitCell)) {
5572 checkTypeType = ltype.unbox();
5574 return Type::Cell;
5577 not_reached();
5580 smart::vector<InterpOneData::LocalType>
5581 HhbcTranslator::interpOutputLocals(const NormalizedInstruction& inst,
5582 bool& smashesAllLocals,
5583 folly::Optional<Type> pushedType) {
5584 using namespace JIT::InstrFlags;
5585 if (!(getInstrInfo(inst.op()).out & Local)) return {};
5587 smart::vector<InterpOneData::LocalType> locals;
5588 auto setLocType = [&](uint32_t id, Type t) {
5589 // Relax the type for pseudomains so that we can actually guard on it.
5590 auto const type = inPseudoMain() ? t.relaxToGuardable() : t;
5591 locals.emplace_back(id, type);
5593 auto setImmLocType = [&](uint32_t id, Type t) {
5594 setLocType(inst.imm[id].u_LA, t);
5596 auto* func = curFunc();
5598 switch (inst.op()) {
5599 case OpSetN:
5600 case OpSetOpN:
5601 case OpIncDecN:
5602 case OpBindN:
5603 case OpVGetN:
5604 case OpUnsetN:
5605 smashesAllLocals = true;
5606 break;
5608 case OpSetOpL:
5609 case OpIncDecL: {
5610 assert(pushedType.hasValue());
5611 auto locType = m_irb->localType(localInputId(inst), DataTypeSpecific);
5612 assert(locType < Type::Gen);
5614 auto stackType = inst.outputPredicted ? inst.outPred : pushedType.value();
5615 setImmLocType(0, locType.isBoxed() ? stackType.box() : stackType);
5616 break;
5619 case OpStaticLocInit:
5620 setImmLocType(0, Type::BoxedCell);
5621 break;
5623 case OpInitThisLoc:
5624 setImmLocType(0, Type::Cell);
5625 break;
5627 case OpSetL: {
5628 auto locType = m_irb->localType(localInputId(inst), DataTypeSpecific);
5629 auto stackType = topType(0);
5630 // SetL preserves reffiness of a local.
5631 setImmLocType(0, locType.isBoxed() ? boxType(stackType) : stackType);
5632 break;
5634 case OpVGetL:
5635 case OpBindL: {
5636 assert(pushedType.hasValue());
5637 assert(pushedType->isBoxed());
5638 setImmLocType(0, pushedType.value());
5639 break;
5642 case OpUnsetL:
5643 case OpPushL:
5644 setImmLocType(0, Type::Uninit);
5645 break;
5647 case OpSetM:
5648 case OpSetOpM:
5649 case OpBindM:
5650 case OpVGetM:
5651 case OpSetWithRefLM:
5652 case OpSetWithRefRM:
5653 case OpUnsetM:
5654 case OpFPassM:
5655 case OpIncDecM:
5656 switch (inst.immVec.locationCode()) {
5657 case LL: {
5658 auto const& mii = getMInstrInfo(inst.mInstrOp());
5659 auto const& base = inst.inputs[mii.valCount()]->location;
5660 assert(base.space == Location::Local);
5662 // MInstrEffects expects to be used in the context of a normally
5663 // translated instruction, not an interpOne. The two important
5664 // differences are that the base is normally a PtrTo* and we need to
5665 // supply an IR opcode representing the operation. SetWithRefElem is
5666 // used instead of SetElem because SetElem makes a few assumptions
5667 // about side exits that interpOne won't do.
5668 auto const baseType = m_irb->localType(base.offset,
5669 DataTypeSpecific).ptr();
5670 auto const isUnset = inst.op() == OpUnsetM;
5671 auto const isProp = mcodeIsProp(inst.immVecM[0]);
5673 if (isUnset && isProp) break;
5674 auto op = isProp ? SetProp : isUnset ? UnsetElem : SetWithRefElem;
5675 MInstrEffects effects(op, baseType);
5676 if (effects.baseValChanged) {
5677 setLocType(base.offset, effects.baseType.deref());
5679 break;
5682 case LNL:
5683 case LNC:
5684 smashesAllLocals = true;
5685 break;
5687 default:
5688 break;
5690 break;
5692 case OpMIterInitK:
5693 case OpMIterNextK:
5694 setImmLocType(3, Type::Cell);
5695 case OpMIterInit:
5696 case OpMIterNext:
5697 setImmLocType(2, Type::BoxedCell);
5698 break;
5700 case OpIterInitK:
5701 case OpWIterInitK:
5702 case OpIterNextK:
5703 case OpWIterNextK:
5704 setImmLocType(3, Type::Cell);
5705 case OpIterInit:
5706 case OpWIterInit:
5707 case OpIterNext:
5708 case OpWIterNext:
5709 setImmLocType(2, Type::Gen);
5710 break;
5712 case OpVerifyParamType: {
5713 auto paramId = inst.imm[0].u_LA;
5714 auto const& tc = func->params()[paramId].typeConstraint();
5715 auto locType = m_irb->localType(localInputId(inst), DataTypeSpecific);
5716 if (tc.isArray() && !tc.isSoft() && !func->mustBeRef(paramId) &&
5717 (locType <= Type::Obj || locType.maybeBoxed())) {
5718 setImmLocType(0, locType.isBoxed() ? Type::BoxedCell : Type::Cell);
5720 break;
5723 case OpSilence:
5724 if (static_cast<SilenceOp>(inst.imm[0].u_OA) == SilenceOp::Start) {
5725 setImmLocType(inst.imm[0].u_LA, Type::Int);
5727 break;
5729 default:
5730 not_reached();
5733 return locals;
5736 void HhbcTranslator::emitInterpOne(const NormalizedInstruction& inst) {
5737 folly::Optional<Type> checkTypeType;
5738 auto stackType = interpOutputType(inst, checkTypeType);
5739 auto popped = getStackPopped(inst.pc());
5740 auto pushed = getStackPushed(inst.pc());
5741 FTRACE(1, "emitting InterpOne for {}, result = {}, popped {}, pushed {}\n",
5742 inst.toString(),
5743 stackType.hasValue() ? stackType->toString() : "<none>",
5744 popped, pushed);
5746 InterpOneData idata;
5747 auto locals = interpOutputLocals(inst, idata.smashesAllLocals, stackType);
5748 idata.nChangedLocals = locals.size();
5749 idata.changedLocals = locals.data();
5751 emitInterpOne(stackType, popped, pushed, idata);
5752 if (checkTypeType) {
5753 auto const out = getInstrInfo(inst.op()).out;
5754 auto const checkIdx = (out & InstrFlags::StackIns2) ? 2
5755 : (out & InstrFlags::StackIns1) ? 1
5756 : 0;
5757 checkTypeStack(checkIdx, *checkTypeType, inst.nextSk().offset());
5761 void HhbcTranslator::emitInterpOne(int popped) {
5762 InterpOneData idata;
5763 emitInterpOne(folly::none, popped, 0, idata);
5766 void HhbcTranslator::emitInterpOne(Type outType, int popped) {
5767 InterpOneData idata;
5768 emitInterpOne(outType, popped, 1, idata);
5771 void HhbcTranslator::emitInterpOne(folly::Optional<Type> outType, int popped,
5772 int pushed, InterpOneData& idata) {
5773 auto unit = curFunc()->unit();
5774 auto sp = spillStack();
5775 auto op = unit->getOpcode(bcOff());
5777 auto& iInfo = getInstrInfo(op);
5778 if (iInfo.type == JIT::InstrFlags::OutFDesc) {
5779 m_fpiStack.emplace(sp, m_irb->spOffset());
5780 } else if (isFCallStar(op) && !m_fpiStack.empty()) {
5781 m_fpiStack.pop();
5784 idata.bcOff = bcOff();
5785 idata.cellsPopped = popped;
5786 idata.cellsPushed = pushed;
5787 idata.opcode = op;
5789 auto const changesPC = opcodeChangesPC(idata.opcode);
5790 gen(changesPC ? InterpOneCF : InterpOne, outType,
5791 makeCatch(), idata, sp, m_irb->fp());
5792 assert(m_irb->stackDeficit() == 0);
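// Usage sketch (hypothetical calls, for illustration only): a bytecode the
// JIT chooses not to translate directly can be punted to the interpreter via
// the thin wrappers above, e.g.
//
//   emitInterpOne(Type::Cell, 2);  // pop two cells, push one Cell
//   emitInterpOne(1);              // pop one cell, push nothing
//
// Both funnel into the overload above, which syncs the eval stack with
// spillStack() and emits InterpOne (or InterpOneCF for PC-changing opcodes)
// carrying the bookkeeping in InterpOneData.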
5795 std::string HhbcTranslator::showStack() const {
5796 if (isInlining()) {
5797 return folly::format("{:*^80}\n",
5798 " I don't understand inlining stacks yet ").str();
5800 std::ostringstream out;
5801 auto header = [&](const std::string& str) {
5802 out << folly::format("+{:-^82}+\n", str);
5805 const int32_t frameCells = resumed() ? 0 : curFunc()->numSlotsInFrame();
5806 const int32_t stackDepth =
5807 m_irb->spOffset() + m_irb->evalStack().size()
5808 - m_irb->stackDeficit() - frameCells;
5809 auto spOffset = stackDepth;
5810 auto elem = [&](const std::string& str) {
5811 out << folly::format("| {:<80} |\n",
5812 folly::format("{:>2}: {}",
5813 stackDepth - spOffset, str));
5814 assert(spOffset > 0);
5815 --spOffset;
5818 auto fpi = curFunc()->findFPI(bcOff());
5819 auto checkFpi = [&]() {
5820 if (fpi && spOffset + frameCells == fpi->m_fpOff) {
5821 auto fpushOff = fpi->m_fpushOff;
5822 auto after = fpushOff + instrLen((Op*)curUnit()->at(fpushOff));
5823 std::ostringstream msg;
5824 msg << "ActRec from ";
5825 curUnit()->prettyPrint(msg, Unit::PrintOpts().range(fpushOff, after)
5826 .noLineNumbers()
5827 .indent(0)
5828 .noFuncs());
5829 auto msgStr = msg.str();
5830 assert(msgStr.back() == '\n');
5831 msgStr.erase(msgStr.size() - 1);
5832 for (unsigned i = 0; i < kNumActRecCells; ++i) elem(msgStr);
5833 fpi = fpi->m_parentIndex != -1 ? &curFunc()->fpitab()[fpi->m_parentIndex]
5834 : nullptr;
5835 return true;
5837 return false;
5840 header(folly::format(" {} stack element(s); m_evalStack: ",
5841 stackDepth).str());
5842 for (unsigned i = 0; i < m_irb->evalStack().size(); ++i) {
5843 while (checkFpi());
5844 SSATmp* value = top(DataTypeGeneric, i); // debug-only
5845 elem(value->inst()->toString());
5848 header(" in-memory ");
5849 for (unsigned i = m_irb->stackDeficit(); spOffset > 0; ) {
5850 assert(i < curFunc()->maxStackCells());
5851 if (checkFpi()) {
5852 i += kNumActRecCells;
5853 continue;
5856 auto stkVal = getStackValue(m_irb->sp(), i);
5857 std::ostringstream elemStr;
5858 if (stkVal.knownType == Type::StackElem) elem("unknown");
5859 else if (stkVal.value) elem(stkVal.value->inst()->toString());
5860 else elem(stkVal.knownType.toString());
5862 ++i;
5864 header("");
5865 out << "\n";
5867 header(folly::format(" {} local(s) ", curFunc()->numLocals()).str());
5868 for (unsigned i = 0; i < curFunc()->numLocals(); ++i) {
5869 auto localValue = m_irb->localValue(i, DataTypeGeneric);
5870 auto str = localValue
5871 ? localValue->inst()->toString()
5872 : m_irb->localType(i, DataTypeGeneric).toString();
5873 out << folly::format("| {:<80} |\n",
5874 folly::format("{:>2}: {}", i, str));
5876 header("");
5877 return out.str();
5881 * Get SSATmps representing all the information on the virtual eval
5882 * stack in preparation for a spill or exit trace. Top of stack will
5883 * be in the last element.
5885 * Doesn't actually remove these values from the eval stack.
5887 std::vector<SSATmp*> HhbcTranslator::peekSpillValues() const {
5888 std::vector<SSATmp*> ret;
5889 ret.reserve(m_irb->evalStack().size());
5890 for (int i = m_irb->evalStack().size(); i--; ) {
5891 // DataTypeGeneric is used here because SpillStack just teleports the
5892 // values to memory.
5893 SSATmp* elem = top(DataTypeGeneric, i);
5894 ret.push_back(elem);
5896 return ret;
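// For example (sketch): with an eval stack holding [.., A, B], where B is the
// top of the stack, peekSpillValues() returns {A, B}: deepest element first,
// top of stack last. Callers such as makeExit() below pass the vector on to
// makeExitImpl() unchanged.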
5899 Block* HhbcTranslator::makeExit(Offset targetBcOff /* = -1 */) {
5900 auto spillValues = peekSpillValues();
5901 return makeExit(targetBcOff, spillValues);
5904 Block* HhbcTranslator::makeExit(Offset targetBcOff,
5905 std::vector<SSATmp*>& spillValues) {
5906 if (targetBcOff == -1) targetBcOff = bcOff();
5907 return makeExitImpl(targetBcOff, ExitFlag::JIT, spillValues, CustomExit{});
5910 Block* HhbcTranslator::makePseudoMainExit(Offset targetBcOff /* = -1 */) {
5911 return inPseudoMain() ? makeExit(targetBcOff) : nullptr;
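// Usage sketch (illustrative, not a specific call site): code that loads or
// stores locals and may run in a pseudo-main typically grabs this exit up
// front and threads it through the local helpers further down, e.g.
//
//   auto const ldgblExit = makePseudoMainExit();
//   auto const val = ldLoc(id, ldgblExit, DataTypeSpecific);
//
// Outside of a pseudo-main the exit is nullptr and ldLoc() never uses it.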
5914 Block* HhbcTranslator::makeExitWarn(Offset targetBcOff,
5915 std::vector<SSATmp*>& spillValues,
5916 const StringData* warning) {
5917 assert(targetBcOff != -1);
5918 return makeExitImpl(targetBcOff, ExitFlag::JIT, spillValues,
5919 [&]() -> SSATmp* {
5920 gen(RaiseWarning, makeCatchNoSpill(), cns(warning));
5921 return nullptr;
5926 Block* HhbcTranslator::makeExitError(SSATmp* msg, Block* catchBlock) {
5927 auto exit = m_irb->makeExit();
5928 BlockPusher bp(*m_irb, m_irb->marker(), exit);
5929 gen(RaiseError, catchBlock, msg);
5930 return exit;
5933 Block* HhbcTranslator::makeExitNullThis() {
5934 return makeExitError(cns(makeStaticString(Strings::FATAL_NULL_THIS)),
5935 makeCatch());
5938 template<class ExitLambda>
5939 Block* HhbcTranslator::makeSideExit(Offset targetBcOff, ExitLambda exit) {
5940 auto spillValues = peekSpillValues();
5941 return makeExitImpl(targetBcOff, ExitFlag::DelayedMarker, spillValues, exit);
5944 Block* HhbcTranslator::makeExitSlow() {
5945 auto spillValues = peekSpillValues();
5946 return makeExitImpl(bcOff(), ExitFlag::Interp, spillValues, CustomExit{});
5949 Block* HhbcTranslator::makeExitOpt(TransID transId) {
5950 Offset targetBcOff = bcOff();
5951 auto const exit = m_irb->makeExit();
5953 BCMarker exitMarker {
5954 SrcKey{ curFunc(), targetBcOff, resumed() },
5955 static_cast<int32_t>(m_irb->spOffset() +
5956 m_irb->evalStack().size() - m_irb->stackDeficit()),
5957 m_profTransID
5960 BlockPusher blockPusher(*m_irb, exitMarker, exit);
5962 SSATmp* stack = nullptr;
5963 if (m_irb->stackDeficit() != 0 || !m_irb->evalStack().empty()) {
5964 stack = spillStack();
5965 } else {
5966 stack = m_irb->sp();
5969 gen(SyncABIRegs, m_irb->fp(), stack);
5970 gen(ReqRetranslateOpt, ReqRetransOptData(transId, targetBcOff));
5972 return exit;
5975 Block* HhbcTranslator::makeExitImpl(Offset targetBcOff, ExitFlag flag,
5976 std::vector<SSATmp*>& stackValues,
5977 const CustomExit& customFn) {
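// Overview of the flag handling below (a summary of this function, nothing
// is changed): ExitFlag::Interp interp-ones the target instruction and,
// unless that opcode itself changes PC, ReqBindJmps past it. The other flags
// end the exit with ReqBindJmp to the target, or ReqRetranslate when both
// the current and target offsets are this translation's initial offset.
// ExitFlag::DelayedMarker additionally keeps the current marker while the
// stack is spilled and only switches to the exit marker right before
// SyncABIRegs.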
5978 Offset curBcOff = bcOff();
5979 BCMarker currentMarker = makeMarker(curBcOff);
5980 m_irb->evalStack().swap(stackValues);
5981 SCOPE_EXIT {
5982 m_bcStateStack.back().bcOff = curBcOff;
5983 m_irb->evalStack().swap(stackValues);
5986 BCMarker exitMarker = makeMarker(targetBcOff);
5988 auto const exit = m_irb->makeExit();
5989 BlockPusher tp(*m_irb,
5990 flag == ExitFlag::DelayedMarker ? currentMarker : exitMarker,
5991 exit);
5993 if (flag != ExitFlag::DelayedMarker) {
5994 m_bcStateStack.back().bcOff = targetBcOff;
5997 auto stack = spillStack();
5999 if (customFn) {
6000 stack = gen(ExceptionBarrier, stack);
6001 auto const customTmp = customFn();
6002 if (customTmp) {
6003 SSATmp* spill2[] = { stack, cns(0), customTmp };
6004 stack = gen(SpillStack,
6005 std::make_pair(sizeof spill2 / sizeof spill2[0], spill2)
6007 exitMarker.setSpOff(exitMarker.spOff() + 1);
6011 if (flag == ExitFlag::DelayedMarker) {
6012 m_irb->setMarker(exitMarker);
6013 m_bcStateStack.back().bcOff = targetBcOff;
6016 gen(SyncABIRegs, m_irb->fp(), stack);
6018 if (flag == ExitFlag::Interp) {
6019 auto interpSk = SrcKey {curFunc(), targetBcOff, resumed()};
6020 auto pc = curUnit()->at(targetBcOff);
6021 auto changesPC = opcodeChangesPC(*reinterpret_cast<const Op*>(pc));
6022 auto interpOp = changesPC ? InterpOneCF : InterpOne;
6024 InterpOneData idata;
6025 idata.bcOff = targetBcOff;
6026 idata.cellsPopped = getStackPopped(pc);
6027 idata.cellsPushed = getStackPushed(pc);
6028 idata.opcode = *reinterpret_cast<const Op*>(pc);
6030 // This is deliberately ignoring anything the opcode might output on the
6031 // stack -- this Unit is about to end.
6032 gen(interpOp, idata, makeCatchNoSpill(), stack, m_irb->fp());
6034 if (!changesPC) {
6035 // If the op changes PC, InterpOneCF handles getting to the right place
6036 gen(ReqBindJmp, BCOffset(interpSk.advanced().offset()));
6038 return exit;
6041 if (!isInlining() &&
6042 curBcOff == m_context.initBcOffset &&
6043 targetBcOff == m_context.initBcOffset) {
6044 // Note that if we're inlining, then targetBcOff is in the inlined
6045 // func, while context.initBcOffset is in the outer func, so
6046 // bindJmp will always work (and there's no guarantee that there
6047 // is an anchor translation, so we must not use ReqRetranslate).
6048 gen(ReqRetranslate);
6049 } else {
6050 gen(ReqBindJmp, BCOffset(targetBcOff));
6052 return exit;
6056 * Create a catch block with a user-defined body (usually empty or a
6057 * SpillStack). Regardless of what body() does, it must return the current
6058 * stack pointer. This is a block to be invoked by the unwinder while unwinding
6059 * through a call to C++ from translated code. When attached to an instruction
6060 * as its taken field, code will be generated and the block will be registered
6061 * with the unwinder automatically.
6063 template<typename Body>
6064 Block* HhbcTranslator::makeCatchImpl(Body body) {
6065 auto exit = m_irb->makeExit(Block::Hint::Unused);
6067 BlockPusher bp(*m_irb, makeMarker(bcOff()), exit);
6068 gen(BeginCatch);
6069 auto sp = body();
6070 gen(EndCatch, m_irb->fp(), sp);
6072 return exit;
6076 * Create a catch block that spills the current state of the eval stack. The
6077 * incoming value of spillVals will be the top of the spilled stack: values in
6078 * the eval stack will be appended to spillVals to form the sources for the
6079 * SpillStack.
6081 Block* HhbcTranslator::makeCatch(std::vector<SSATmp*> spillVals,
6082 int64_t numPop) {
6083 return makeCatchImpl([&] {
6084 auto spills = peekSpillValues();
6085 spills.insert(spills.begin(), spillVals.begin(), spillVals.end());
6086 return emitSpillStack(m_irb->sp(), spills, numPop);
6091 * Create a catch block with no SpillStack. Some of our optimizations rely on
6092 * the ability to insert code on *every* path out of a trace, so we can't
6093 * simply elide the catch block in the cases that want an empty body.
6095 Block* HhbcTranslator::makeCatchNoSpill() {
6096 return makeCatchImpl([&] { return m_irb->sp(); });
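// Usage sketch (this pattern appears elsewhere in this file, e.g. in
// makeExitWarn above): the catch block is attached to a potentially-throwing
// instruction by passing it as the taken block,
//
//   gen(RaiseWarning, makeCatchNoSpill(), cns(warning));
//
// so the unwinder runs the BeginCatch/EndCatch sequence produced by
// makeCatchImpl() if the C++ helper behind that instruction throws.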
6100 * Create a block corresponding to bytecode control flow.
6102 Block* HhbcTranslator::makeBlock(Offset offset) {
6103 return m_irb->makeBlock(offset);
6106 SSATmp* HhbcTranslator::emitSpillStack(SSATmp* sp,
6107 const std::vector<SSATmp*>& spillVals,
6108 int64_t extraOffset) {
6109 std::vector<SSATmp*> ssaArgs{
6110 sp, cns(int64_t(m_irb->stackDeficit() + extraOffset))
6112 ssaArgs.insert(ssaArgs.end(), spillVals.rbegin(), spillVals.rend());
6114 auto args = std::make_pair(ssaArgs.size(), &ssaArgs[0]);
6115 return gen(SpillStack, args);
6118 SSATmp* HhbcTranslator::spillStack() {
6119 auto newSp = emitSpillStack(m_irb->sp(), peekSpillValues());
6120 m_irb->evalStack().clear();
6121 m_irb->clearStackDeficit();
6122 return newSp;
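// For illustration: emitSpillStack() assembles the SpillStack sources as
//
//   { sp, cns(stackDeficit + extraOffset), <spillVals in reverse order> }
//
// and spillStack() then clears the eval stack and the stack deficit so that
// m_irb->sp() once again describes the up-to-date in-memory stack.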
6125 void HhbcTranslator::prepareForSideExit() {
6126 spillStack();
6129 void HhbcTranslator::exceptionBarrier() {
6130 auto const sp = spillStack();
6131 gen(ExceptionBarrier, sp);
6134 SSATmp* HhbcTranslator::ldStackAddr(int32_t offset, TypeConstraint tc) {
6135 m_irb->constrainStack(offset, tc);
6136 // You're almost certainly doing it wrong if you want to get the address of a
6137 // stack cell that's in m_irb->evalStack().
6138 assert(offset >= (int32_t)m_irb->evalStack().numCells());
6139 return gen(
6140 LdStackAddr,
6141 Type::PtrToGen,
6142 StackOffset(offset + m_irb->stackDeficit() - m_irb->evalStack().numCells()),
6143 m_irb->sp()
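// For example (arithmetic sketch): with two cells still sitting in
// m_irb->evalStack() and a stack deficit of 1, a request for logical offset 3
// resolves to StackOffset(3 + 1 - 2) == 2 relative to m_irb->sp().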
6147 SSATmp* HhbcTranslator::unbox(SSATmp* val, Block* exit) {
6148 auto const type = val->type();
6149 // If we don't have an exit the LdRef can't be a guard.
6150 auto const inner = exit ? (type & Type::BoxedCell).innerType() : Type::Cell;
6152 if (type.isBoxed() || type.notBoxed()) {
6153 m_irb->constrainValue(val, DataTypeCountness);
6154 return type.isBoxed() ? gen(LdRef, inner, exit, val) : val;
6157 return m_irb->cond(
6159 [&](Block* taken) {
6160 return gen(CheckType, Type::BoxedCell, taken, val);
6162 [&](SSATmp* box) { // Next: val is a ref
6163 m_irb->constrainValue(box, DataTypeCountness);
6164 return gen(LdRef, inner, exit, box);
6166 [&] { // Taken: val is unboxed
6167 return gen(AssertType, Type::Cell, val);
6171 SSATmp* HhbcTranslator::ldLoc(uint32_t locId, Block* exit, TypeConstraint tc) {
6172 assert(IMPLIES(exit == nullptr, !inPseudoMain()));
6174 auto const opStr = inPseudoMain() ? "LdGbl" : "LdLoc";
6175 m_irb->constrainLocal(locId, tc, opStr);
6177 if (inPseudoMain()) {
6178 auto const type = m_irb->localType(locId, tc).relaxToGuardable();
6179 assert(!type.isSpecialized());
6180 assert(type == type.dropConstVal());
6182 // We don't support locals being type Gen, so if we ever get into such a
6183 // case, we need to punt.
6184 if (type == Type::Gen) PUNT(LdGbl-Gen);
6185 return gen(LdGbl, type, exit, LocalId(locId), m_irb->fp());
6188 return gen(LdLoc, Type::Gen, LocalId(locId), m_irb->fp());
6191 SSATmp* HhbcTranslator::ldLocAddr(uint32_t locId, TypeConstraint tc) {
6192 m_irb->constrainLocal(locId, tc, "LdLocAddr");
6193 return gen(LdLocAddr, Type::PtrToGen, LocalId(locId), m_irb->fp());
6197 * Load a local, and if it's boxed dereference to get the inner cell.
6199 * Note: For boxed values, this will generate a LdRef instruction which
6200 * takes the given exit trace in case the inner type doesn't match
6201 * the tracked type for this local. This check may be optimized away
6202 * if we can determine that the inner type must match the tracked type.
6204 SSATmp* HhbcTranslator::ldLocInner(uint32_t locId,
6205 Block* ldrefExit,
6206 Block* ldgblExit,
6207 TypeConstraint constraint) {
6208 // We only care if the local is KindOfRef or not. DataTypeCountness
6209 // gets us that.
6210 auto loc = ldLoc(locId, ldgblExit, DataTypeCountness);
6211 assert((loc->type().isBoxed() || loc->type().notBoxed()) &&
6212 "Currently we don't handle traces where locals are maybeBoxed");
6214 auto value = loc->type().isBoxed()
6215 ? gen(LdRef, loc->type().innerType(), ldrefExit, loc)
6216 : loc;
6217 m_irb->constrainValue(value, constraint);
6218 return value;
6222 * This is a wrapper to ldLocInner that also emits the RaiseUninitLoc if the
6223 * local is uninitialized. The catchBlock argument may be provided if the
6224 * caller requires the catch trace to be generated at a point earlier than when
6225 * it calls this function.
6227 SSATmp* HhbcTranslator::ldLocInnerWarn(uint32_t id,
6228 Block* ldrefExit,
6229 Block* ldgblExit,
6230 TypeConstraint constraint,
6231 Block* catchBlock /* = nullptr */) {
6232 if (!catchBlock) catchBlock = makeCatch();
6233 auto const locVal = ldLocInner(id, ldrefExit, ldgblExit, constraint);
6234 auto const varName = curFunc()->localVarName(id);
6236 auto warnUninit = [&] {
6237 if (varName != nullptr) {
6238 gen(RaiseUninitLoc, catchBlock, cns(varName));
6240 return cns(Type::InitNull);
6243 m_irb->constrainLocal(id, DataTypeCountnessInit, "ldLocInnerWarn");
6244 if (locVal->type() <= Type::Uninit) {
6245 return warnUninit();
6248 if (locVal->type().maybe(Type::Uninit)) {
6249 // The local might be Uninit so we have to check at runtime.
6250 return m_irb->cond(
6252 [&](Block* taken) {
6253 gen(CheckInit, taken, locVal);
6255 [&] { // Next: local is Init
6256 return locVal;
6258 [&] { // Taken: local is Uninit
6259 return warnUninit();
6263 return locVal;
6267 * Store to a local; if it's boxed, set the value on the inner cell.
6269 * Returns the value that was stored to the local. Assumes that 'newVal'
6270 * has already been incremented, with this Store consuming the
6271 * ref-count increment. If the caller of this function needs to
6272 * push the stored value on stack, it should set 'incRefNew' so that
6273 * 'newVal' will have its ref-count incremented.
6275 * Pre: !newVal->type().isBoxed() && !newVal->type().maybeBoxed()
6276 * Pre: exit != nullptr if the local may be boxed
6278 SSATmp* HhbcTranslator::stLocImpl(uint32_t id,
6279 Block* ldrefExit,
6280 Block* ldgblExit,
6281 SSATmp* newVal,
6282 bool decRefOld,
6283 bool incRefNew) {
6284 assert(!newVal->type().maybeBoxed());
6286 auto const cat = decRefOld ? DataTypeCountness : DataTypeGeneric;
6287 auto const oldLoc = ldLoc(id, ldgblExit, cat);
6288 assert(oldLoc->type().isBoxed() || oldLoc->type().notBoxed());
6290 if (oldLoc->type().notBoxed()) {
6291 genStLocal(id, m_irb->fp(), newVal);
6292 if (incRefNew) gen(IncRef, newVal);
6293 if (decRefOld) gen(DecRef, oldLoc);
6294 return newVal;
6297 // It's important that the IncRef happens after the LdRef, since the
6298 // LdRef is also a guard on the inner type and may side-exit.
6299 auto const innerCell = gen(
6300 LdRef, oldLoc->type().innerType(), ldrefExit, oldLoc
6302 gen(StRef, oldLoc, newVal);
6303 if (incRefNew) gen(IncRef, newVal);
6304 if (decRefOld) {
6305 gen(DecRef, innerCell);
6306 m_irb->constrainValue(oldLoc, TypeConstraint(DataTypeCountness,
6307 DataTypeCountness));
6310 return newVal;
6313 SSATmp* HhbcTranslator::pushStLoc(uint32_t id,
6314 Block* ldrefExit,
6315 Block* ldgblExit,
6316 SSATmp* newVal) {
6317 constexpr bool decRefOld = true;
6318 constexpr bool incRefNew = true;
6319 SSATmp* ret = stLocImpl(
6321 ldrefExit,
6322 ldgblExit,
6323 newVal,
6324 decRefOld,
6325 incRefNew
6328 // Approximately mimic hhbc guard relaxation. When RefcountOpts are
6329 // enabled a SetL followed by a PopC will not touch the refcount,
6330 // since the IncRef will be cancelled by the DecRef.
6331 auto outputPopped = curSrcKey().advanced().op() == OpPopC &&
6332 m_irb->localType(id, DataTypeGeneric).notBoxed() &&
6333 RuntimeOption::EvalHHIRRefcountOpts;
6335 auto const cat = outputPopped ? DataTypeGeneric : DataTypeCountness;
6336 m_irb->constrainValue(ret, cat);
6337 return push(ret);
6340 SSATmp* HhbcTranslator::stLoc(uint32_t id,
6341 Block* ldrefExit,
6342 Block* ldgblExit,
6343 SSATmp* newVal) {
6344 constexpr bool decRefOld = true;
6345 constexpr bool incRefNew = false;
6346 return stLocImpl(id, ldrefExit, ldgblExit, newVal, decRefOld, incRefNew);
6349 SSATmp* HhbcTranslator::stLocNRC(uint32_t id,
6350 Block* ldrefExit,
6351 Block* ldgblExit,
6352 SSATmp* newVal) {
6353 constexpr bool decRefOld = false;
6354 constexpr bool incRefNew = false;
6355 return stLocImpl(id, ldrefExit, ldgblExit, newVal, decRefOld, incRefNew);
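// Informational summary of the wrappers above: pushStLoc() both DecRefs the
// old value and IncRefs/pushes the new one (the SetL pattern), stLoc()
// DecRefs the old value but does not push, and stLocNRC() performs neither
// ref-count operation, presumably for callers that have already balanced the
// counts themselves.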
6358 SSATmp* HhbcTranslator::genStLocal(uint32_t id, SSATmp* fp, SSATmp* newVal) {
6359 return gen(inPseudoMain() ? StGbl : StLoc, LocalId(id), fp, newVal);
6362 void HhbcTranslator::end() {
6363 auto const nextSk = curSrcKey().advanced(curUnit());
6364 end(nextSk.offset());
6367 void HhbcTranslator::end(Offset nextPc) {
6368 if (m_hasExit) return;
6370 if (nextPc >= curFunc()->past()) {
6371 // We have fallen off the end of the func's bytecodes. This happens
6372 // when the function's bytecodes end with an unconditional
6373 // backwards jump, so nextPc is out of bounds and would trigger an
6374 // assertion failure in unit.cpp. The common case for this comes
6375 // from the default value funclets, which are placed after the end
6376 // of the function, with an unconditional branch back to the start
6377 // of the function. So you should see this in any function with
6378 // of the function. So you should see this in any function with default params.
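//
// Rough sketch of that layout (hypothetical function with one default
// parameter):
//
//   main entry:  ...body... RetC
//   DV funclet:  <initialize the parameter> Jmp <main entry>
//
// Translating the funclet's trailing Jmp leaves nextPc >= curFunc()->past(),
// which is the case handled by the early return here.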
6379 return;
6381 setBcOff(nextPc, true);
6382 auto const sp = spillStack();
6383 gen(SyncABIRegs, m_irb->fp(), sp);
6384 gen(ReqBindJmp, BCOffset(nextPc));
6387 void HhbcTranslator::endBlock(Offset next) {
6388 if (m_irb->blockExists(next)) {
6389 emitJmp(next,
6390 false /* breakTracelet */,
6391 nullptr);
6395 void HhbcTranslator::checkStrictlyInteger(
6396 SSATmp*& key, KeyType& keyType, bool& checkForInt) {
6397 checkForInt = false;
6398 if (key->isA(Type::Int)) {
6399 keyType = KeyType::Int;
6400 } else {
6401 assert(key->isA(Type::Str));
6402 keyType = KeyType::Str;
6403 if (key->isConst()) {
6404 int64_t i;
6405 if (key->strVal()->isStrictlyInteger(i)) {
6406 keyType = KeyType::Int;
6407 key = cns(i);
6409 } else {
6410 checkForInt = true;
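// Example (sketch): a constant string key "123" is rewritten to cns(123) with
// keyType == KeyType::Int, while a non-constant Str key keeps KeyType::Str
// and sets checkForInt, presumably so the array helpers can redo the
// strictly-integer check at run time.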
6415 bool HhbcTranslator::inPseudoMain() const {
6416 return Translator::liveFrameIsPseudoMain();