Store num args instead of offset in prologue and func entry SrcKeys
hphp/runtime/vm/jit/irgen-resumable.cpp
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/

17 #include "hphp/runtime/ext/asio/ext_wait-handle.h"
18 #include "hphp/runtime/ext/asio/ext_async-function-wait-handle.h"
19 #include "hphp/runtime/ext/asio/ext_async-generator.h"
20 #include "hphp/runtime/ext/asio/ext_static-wait-handle.h"
21 #include "hphp/runtime/ext/generator/ext_generator.h"
22 #include "hphp/runtime/base/repo-auth-type.h"
24 #include "hphp/runtime/vm/hhbc-codec.h"
25 #include "hphp/runtime/vm/resumable.h"
26 #include "hphp/runtime/vm/unwind.h"
28 #include "hphp/runtime/vm/jit/analysis.h"
29 #include "hphp/runtime/vm/jit/irgen-call.h"
30 #include "hphp/runtime/vm/jit/irgen-control.h"
31 #include "hphp/runtime/vm/jit/irgen-exit.h"
32 #include "hphp/runtime/vm/jit/irgen-inlining.h"
33 #include "hphp/runtime/vm/jit/irgen-internal.h"
34 #include "hphp/runtime/vm/jit/irgen-interpone.h"
35 #include "hphp/runtime/vm/jit/irgen-ret.h"
36 #include "hphp/runtime/vm/jit/irgen-types.h"
37 #include "hphp/runtime/vm/jit/normalized-instruction.h"
39 #include "hphp/util/trace.h"
namespace HPHP::jit::irgen {

namespace {

//////////////////////////////////////////////////////////////////////

TRACE_SET_MOD(hhir);

// Returns true and fills "locals" with types of the function's locals if this
// op is an Await in "tail position" (whose results are immediately returned).
bool isTailAwait(const IRGS& env, std::vector<Type>& locals) {
  auto const unit = curUnit(env);
  auto const func = curFunc(env);
  auto const cls = curClass(env);
  auto sk = curSrcKey(env);

  TRACE(2, "isTailAwait analysis:\n");
  if (sk.op() != Op::Await) {
    FTRACE(2, "  Non-Await opcode: {}\n", instrToString(sk.pc(), func));
    return false;
  } else if (func->isGenerator()) {
    FTRACE(2, "  Function is a generator: {}\n", func->fullName());
    return false;
  } else if (func->lookupVarId(s_86metadata.get()) != kInvalidId) {
    FTRACE(2, "  Function has metadata: {}\n", s_86metadata.get()->data());
    return false;
  }

  auto const offset = findCatchHandler(func, bcOff(env));
  if (offset != kInvalidOffset) {
    FTRACE(2, "  Found catch block at offset: {}\n", offset);
    return false;
  }

  // In some cases, we'll use a temporary local for tail awaits.
  // Track up to one usage of such a variable.
  auto resultLocal = kInvalidId;
  sk.advance(func);
  for (auto i = 0; i < func->numLocals(); i++) {
    auto const loc = Location::Local { safe_cast<uint32_t>(i) };
    locals.push_back(env.irb->fs().typeOf(loc));
  }

  // Place a limit on the number of iterations in case of infinite loops.
  for (auto i = 256; i-- > 0;) {
    FTRACE(2, "  {}\n", instrToString(sk.pc(), func));
    switch (sk.op()) {
      case Op::RetC:         return resultLocal == kInvalidId;
      case Op::AssertRATStk: break;
      case Op::AssertRATL: {
        auto const type = typeFromRAT(getImm(sk.pc(), 1, unit).u_RATA, cls);
        locals[getImm(sk.pc(), 0).u_ILA] &= type;
        break;
      }
      case Op::PopL: {
        if (resultLocal != kInvalidId) return false;
        resultLocal = getImm(sk.pc(), 0).u_LA;
        locals[resultLocal] = TCell;
        break;
      }
      case Op::PushL: {
        if (resultLocal != getImm(sk.pc(), 0).u_LA) return false;
        locals[resultLocal] = TUninit;
        resultLocal = kInvalidId;
        break;
      }
      case Op::Jmp: {
        sk = SrcKey(sk, sk.offset() + getImm(sk.pc(), 0).u_BA);
        continue;
      }
      default: return false;
    }
    sk.advance(func);
  }
  TRACE(2, "  Processed too many opcodes; bailing\n");
  return false;
}

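// Used only by the tail-await optimization: dec-ref the function's locals and
// $this, either inline or via GenericRetDecRefs, since no new AFWH will be
// created to take ownership of them.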
void doTailAwaitDecRefs(IRGS& env, const std::vector<Type>& locals) {
  auto const shouldFreeInline = [&]{
    if (locals.size() > RO::EvalHHIRInliningMaxReturnLocals) return false;
    auto numRefCounted = 0;
    for (auto i = 0; i < locals.size(); i++) {
      if (locals[i].maybe(TCounted)) numRefCounted++;
    }
    return numRefCounted <= RO::EvalHHIRInliningMaxReturnDecRefs;
  }();

  if (shouldFreeInline) {
    for (auto i = 0; i < locals.size(); i++) {
      if (!locals[i].maybe(TCounted)) continue;
      auto const data = LocalId { safe_cast<uint32_t>(i) };
      gen(env, AssertLoc, data, locals[i], fp(env));
      decRef(
        env,
        gen(env, LdLoc, data, locals[i], fp(env)),
        static_cast<DecRefProfileId>(i)
      );
    }
  } else {
    gen(env, GenericRetDecRefs, fp(env));
  }
  decRefThis(env);
}

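// Check the surprise flags and, on the unlikely taken path, run the given
// suspend hook.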
template<class Hook>
void suspendHook(IRGS& env, Hook hook) {
  ringbufferMsg(env, Trace::RBTypeFuncExit, curFunc(env)->fullName());
  ifThen(
    env,
    [&] (Block* taken) {
      gen(env, CheckSurpriseFlags, taken, anyStackRegister(env));
    },
    [&] {
      hint(env, Block::Hint::Unlikely);
      hook();
    }
  );
}

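// Await in an eagerly executed frame (one not resumed by the scheduler):
// create the AFWH (or AGWH for async generators), suspend the frame, and
// return control to the caller.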
void implAwaitE(IRGS& env, SSATmp* child, Offset suspendOffset,
                Offset resumeOffset) {
  assertx(curFunc(env)->isAsync());
  assertx(resumeMode(env) != ResumeMode::Async);
  // FIXME(T88328140): ifThenElse() emits unreachable code with bad state
  // assertx(spOffBCFromStackBase(env) == spOffEmpty(env));
  assertx(child->type() <= TObj);

  // Bind address at which the execution should resume after awaiting.
  auto const func = curFunc(env);
  auto const suspendOff = cns(env, suspendOffset);
  auto const resumeAddr = [&]{
    auto const resumeSk = SrcKey(func, resumeOffset, ResumeMode::Async);
    auto const bindData = LdBindAddrData { resumeSk, spOffEmpty(env) + 1 };
    return gen(env, LdBindAddr, bindData);
  };

  if (!curFunc(env)->isGenerator()) {
    // Create the AsyncFunctionWaitHandle object. CreateAFWH takes care of
    // copying local variables and iterators. We don't support tracing when
    // we do the tail-call optimization, so we push the suspend hook here.
    auto const createNewAFWH = [&]{
      auto const wh = gen(env, CreateAFWH, fp(env),
                          cns(env, func->numSlotsInFrame()),
                          resumeAddr(), suspendOff, child);
      // Constructing a waithandle teleports locals and iterators to the heap;
      // kill them here to improve alias analysis.
      for (uint32_t i = 0; i < func->numLocals(); ++i) {
        gen(env, KillLoc, LocalId{i}, fp(env));
      }
      for (uint32_t i = 0; i < func->numIterators(); ++i) {
        gen(env, KillIter, IterId{i}, fp(env));
      }
      suspendHook(env, [&] {
        auto const asyncAR = gen(env, LdAFWHActRec, wh);
        gen(env, SuspendHookAwaitEF, fp(env), asyncAR, wh);
      });
      return wh;
    };

    // We don't need to create the new AFWH if we can do a tail-call check.
    auto const waitHandle = [&]{
      std::vector<Type> locals;
      if (RO::EnableArgsInBacktraces) return createNewAFWH();
      if (!isTailAwait(env, locals)) return createNewAFWH();

      // We can run out of tailFrameIds and fail to make this optimization.
      auto const tailFrameId = getAsyncFrameId(curSrcKey(env));
      if (tailFrameId == kInvalidAsyncFrameId) return createNewAFWH();
      auto const type = Type::ExactObj(c_AsyncFunctionWaitHandle::classof());

      return cond(env,
        [&](Block* taken) {
          gen(env, CheckSurpriseFlags, taken, anyStackRegister(env));
          gen(env, AFWHPushTailFrame, taken, child, cns(env, tailFrameId));
        },
        [&]{
          doTailAwaitDecRefs(env, locals);
          return gen(env, AssertType, type, child);
        },
        [&]{ return createNewAFWH(); }
      );
    }();

    gen(env, StImplicitContextWH, waitHandle);

    if (isInlining(env)) {
      suspendFromInlined(env, waitHandle);
      return;
    }

    // Return control to the caller.
    auto const spAdjust = offsetToReturnSlot(env);
    auto const retData = RetCtrlData { spAdjust, false, AuxUnion{0} };
    gen(env, RetCtrl, retData, sp(env), fp(env), waitHandle);
  } else {
    assertx(!isInlining(env));

    // Create the AsyncGeneratorWaitHandle object.
    auto const waitHandle =
      gen(env, CreateAGWH, fp(env), resumeAddr(), suspendOff, child);

    // Call the suspend hook.
    suspendHook(env, [&] {
      gen(env, SuspendHookAwaitEG, fp(env), waitHandle);
    });

    gen(env, StImplicitContextWH, waitHandle);

    // Return control to the caller (AG::next()).
    auto const spAdjust = offsetFromIRSP(env, BCSPRelOffset{-1});
    auto const retData = RetCtrlData { spAdjust, true, AuxUnion{0} };
    gen(env, RetCtrl, retData, sp(env), fp(env), waitHandle);
  }
}

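// Await in a frame that was already resumed by the scheduler: block the
// current wait handle on the child and switch to another runnable
// ResumableWaitHandle.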
void implAwaitR(IRGS& env, SSATmp* child, Offset suspendOffset,
                Offset resumeOffset) {
  assertx(curFunc(env)->isAsync());
  assertx(resumeMode(env) == ResumeMode::Async);
  // FIXME(T88328140): ifThenElse() emits unreachable code with bad state
  // assertx(spOffBCFromStackBase(env) == spOffEmpty(env));
  assertx(child->isA(TObj));
  assertx(!isInlining(env));

  // We must do this before we do anything, because it can throw, and we can't
  // start tearing down the AFWH before that or the unwinder won't be able to
  // react.
  suspendHook(env, [&] {
    gen(env, SuspendHookAwaitR, fp(env), child);
  });

  // Prepare child for establishing dependency.
  gen(env, AFWHPrepareChild, fp(env), child);

  // Suspend the async function.
  auto const resumeSk = SrcKey(curFunc(env), resumeOffset, ResumeMode::Async);
  auto const bindData = LdBindAddrData { resumeSk, spOffEmpty(env) + 1 };
  auto const resumeAddr = gen(env, LdBindAddr, bindData);
  gen(env, StArResumeAddr, SuspendOffset { suspendOffset }, fp(env),
      resumeAddr);

  // Set up the dependency.
  gen(env, AFWHBlockOn, fp(env), child);

  // Call stub that will either transfer control to another ResumableWaitHandle,
  // or return control back to the scheduler. Leave SP pointing to a single
  // uninitialized cell which will be filled by the stub.
  auto const spAdjust = offsetFromIRSP(env, BCSPRelOffset{-1});
  gen(env, AsyncSwitchFast, IRSPRelOffsetData { spAdjust }, sp(env), fp(env));
}

SSATmp* implYieldGen(IRGS& env, SSATmp* key, SSATmp* value) {
  if (key != nullptr) {
    // Teleport yielded key.
    auto const oldKey = gen(env, LdContArKey, TInitCell, fp(env));
    gen(env, StContArKey, fp(env), key);
    decRef(env, oldKey, DecRefProfileId::ResumableOldKey);

    if (key->type() <= TInt) {
      gen(env, ContArUpdateIdx, fp(env), key);
    }
  } else {
    // Increment key.
    if (curFunc(env)->isPairGenerator()) {
      auto const newIdx = gen(env, ContArIncIdx, fp(env));
      auto const oldKey = gen(env, LdContArKey, TInitCell, fp(env));
      gen(env, StContArKey, fp(env), newIdx);
      decRef(env, oldKey, DecRefProfileId::ResumableOldKey);
    } else {
      // Fast path: if this generator has no yield k => v, it is
      // guaranteed that the key is an int.
      gen(env, ContArIncKey, fp(env));
    }
  }

  // Teleport yielded value.
  auto const oldValue = gen(env, LdContArValue, TInitCell, fp(env));
  gen(env, StContArValue, fp(env), value);
  decRef(env, oldValue, DecRefProfileId::ResumableOldValue);

  // Return value of iteration.
  return cns(env, TInitNull);
}

SSATmp* implYieldAGen(IRGS& env, SSATmp* key, SSATmp* value) {
  key = key ? key : cns(env, TInitNull);

  if (resumeMode(env) == ResumeMode::Async) {
    auto const spAdjust = offsetFromIRSP(env, BCSPRelOffset{-1});
    gen(env, AsyncGenYieldR, IRSPRelOffsetData { spAdjust }, sp(env), fp(env),
        key, value);
    return nullptr;
  }

  // Wrap the key and value into a tuple.
  auto const keyValueTuple = gen(env, AllocVec, VanillaVecData { 2 });
  gen(env, InitVecElem, IndexData { 0 }, keyValueTuple, key);
  gen(env, InitVecElem, IndexData { 1 }, keyValueTuple, value);

  // Wrap the tuple into a StaticWaitHandle.
  return gen(env, CreateSSWH, keyValueTuple);
}

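// Shared implementation of Yield and YieldK: run the suspend hook, store the
// resume address, move the generator back to the Started state, teleport the
// yielded key/value into it, and return control to the caller.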
void implYield(IRGS& env, bool withKey) {
  assertx(resumeMode(env) != ResumeMode::None);
  assertx(curFunc(env)->isGenerator());
  assertx(spOffBCFromStackBase(env) == spOffEmpty(env) + (withKey ? 2 : 1));
  assertx(!isInlining(env));

  suspendHook(env, [&] {
    gen(env, SuspendHookYield, fp(env));
  });

  // Resumable::setResumeAddr(resumeAddr, suspendOffset)
  auto const suspendOffset = bcOff(env);
  auto const resumeOffset = nextBcOff(env);
  auto const resumeSk = SrcKey(curFunc(env), resumeOffset, ResumeMode::GenIter);
  auto const bindData = LdBindAddrData { resumeSk, spOffEmpty(env) + 1 };
  auto const resumeAddr = gen(env, LdBindAddr, bindData);
  gen(env,
      StArResumeAddr,
      SuspendOffset { suspendOffset },
      fp(env),
      resumeAddr);

  // Set state from Running to Started.
  gen(env, StContArState,
      GeneratorState { BaseGenerator::State::Started },
      fp(env));

  // No inc/dec-ref as keys and values are teleported.
  auto const value = popC(env, DataTypeGeneric);
  auto const key = withKey ? popC(env) : nullptr;

  auto const retVal = !curFunc(env)->isAsync()
    ? implYieldGen(env, key, value)
    : implYieldAGen(env, key, value);

  // The return to the asio scheduler was already handled.
  if (retVal == nullptr) return;

  // Return control to the caller (Gen::next()).
  assertx(resumeMode(env) == ResumeMode::GenIter);
  auto const spAdjust = offsetFromIRSP(env, BCSPRelOffset{-1});
  auto const retData = RetCtrlData { spAdjust, true, AuxUnion{0} };
  gen(env, RetCtrl, retData, sp(env), fp(env), retVal);
}

/*
 * HHBBC may have proven something about the inner type of this awaitable.
 *
 * So, we may have an assertion on the type of the top of the stack after
 * this instruction. We know the next bytecode instruction is reachable from
 * fallthrough on the Await, so if it is an AssertRATStk 0, anything coming
 * out of the awaitable must be a subtype of that type, so this is a safe
 * and conservative way to do this optimization (even if our successor
 * bytecode offset is a jump target from things we aren't thinking about
 * here).
 */
Type awaitedTypeFromHHBBC(IRGS& env, SrcKey nextSk) {
  auto pc = nextSk.pc();
  if (decode_op(pc) != Op::AssertRATStk) return TInitCell;
  auto const stkLoc = decode_iva(pc);
  if (stkLoc != 0) return TInitCell;
  auto const rat = decodeRAT(curUnit(env), pc);
  return typeFromRAT(rat, curClass(env));
}

/*
 * Try to determine the inner awaitable type from the source of SSATmp.
 */
Type awaitedTypeFromSSATmp(const SSATmp* awaitable) {
  awaitable = canonical(awaitable);

  auto const inst = awaitable->inst();
  if (inst->is(Call)) {
    return (inst->src(2)->hasConstVal(TFunc) &&
            !inst->extra<Call>()->asyncEagerReturn)
      ? awaitedCallReturnType(inst->src(2)->funcVal()) : TInitCell;
  }
  if (inst->is(CallFuncEntry)) {
    auto const extra = inst->extra<CallFuncEntry>();
    return !extra->asyncEagerReturn()
      ? awaitedCallReturnType(extra->target.func()) : TInitCell;
  }
  if (inst->is(CreateAFWH)) {
    return awaitedCallReturnType(inst->func());
  }
  if (inst->is(DefLabel)) {
    auto ty = TBottom;
    auto const dsts = inst->dsts();
    inst->block()->forEachSrc(
      std::find(dsts.begin(), dsts.end(), awaitable) - dsts.begin(),
      [&] (const IRInstruction*, const SSATmp* src) {
        ty = ty | awaitedTypeFromSSATmp(src);
      }
    );
    return ty;
  }

  return TInitCell;
}

Type awaitedType(IRGS& env, SSATmp* awaitable, SrcKey nextSk) {
  return awaitedTypeFromHHBBC(env, nextSk) &
         awaitedTypeFromSSATmp(awaitable);
}

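// Heuristic: true if the awaitable most likely has not finished yet, e.g. it
// came from a call that requested async eager return but still produced a
// wait handle, or from a freshly created AFWH.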
bool likelySuspended(const SSATmp* awaitable) {
  awaitable = canonical(awaitable);
  auto const inst = awaitable->inst();
  if (inst->is(Call) && inst->extra<Call>()->asyncEagerReturn) return true;
  if (inst->is(CallFuncEntry) &&
      inst->extra<CallFuncEntry>()->asyncEagerReturn()) {
    return true;
  }
  if (inst->is(CreateAFWH)) return true;
  if (inst->is(DefLabel)) {
    auto likely = true;
    auto const dsts = inst->dsts();
    inst->block()->forEachSrc(
      std::find(dsts.begin(), dsts.end(), awaitable) - dsts.begin(),
      [&] (const IRInstruction*, const SSATmp* src) {
        likely = likely && likelySuspended(src);
      }
    );
    return likely;
  }

  return false;
}

void implAwaitSucceeded(IRGS& env, SSATmp* child) {
  auto const awaitedTy = awaitedType(env, child, nextSrcKey(env));
  auto const res = gen(env, LdWHResult, awaitedTy, child);
  popC(env);
  gen(env, IncRef, res);
  decRef(env, child, DecRefProfileId::Default);
  push(env, res);
}

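// The awaited handle has already failed: load the exception and either jump
// to a catch handler in this function or hand the exception to the unwinder
// via EnterTCUnwind.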
void implAwaitFailed(IRGS& env, SSATmp* child, Block* exit) {
  auto const stackEmpty = spOffBCFromStackBase(env) == spOffEmpty(env) + 1;
  if (!stackEmpty) {
    assertx(exit);
    assertx(curSrcKey(env).op() == Op::WHResult);
    gen(env, Jmp, exit);
    return;
  }

  auto const offset = findCatchHandler(curFunc(env), bcOff(env));
  auto const exception = gen(env, LdWHResult, TObj, child);
  popC(env);
  gen(env, IncRef, exception);
  decRef(env, child, DecRefProfileId::Default);
  if (offset != kInvalidOffset) {
    push(env, exception);
    jmpImpl(env, offset);
  } else {
    // There are no more catch blocks in this function, we are at the top
    // level throw
    hint(env, Block::Hint::Unlikely);
    spillInlinedFrames(env);
    auto const spOff = spOffBCFromIRSP(env);
    auto const bcSP = gen(env, LoadBCSP, IRSPRelOffsetData { spOff }, sp(env));
    gen(env, StVMFP, fp(env));
    gen(env, StVMSP, bcSP);
    gen(env, StVMPC, cns(env, uintptr_t(curSrcKey(env).pc())));
    genStVMReturnAddr(env);
    gen(env, StVMRegState, cns(env, eagerlyCleanState()));
    auto const etcData = EnterTCUnwindData { spOff, true };
    gen(env, EnterTCUnwind, etcData, exception);
  }
}

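// Shared implementation of Await and WHResult: dispatch on the wait handle's
// state (succeeded, failed, or not yet finished); the caller supplies the
// not-finished behavior via handleNotFinished.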
template<class T>
void implAwait(IRGS& env, T handleNotFinished) {
  // Side exit if not an Awaitable. In most cases the IsWaitHandle check
  // will be optimized out.
  auto const TAwaitable = Type::SubObj(c_Awaitable::classof());
  auto const maybeChild = topC(env);
  if (!maybeChild->isA(TObj)) return interpOne(env);
  if (!maybeChild->type().maybe(TAwaitable)) return interpOne(env);
  auto const exitSlow = makeExitSlow(env);
  gen(env, JmpZero, exitSlow, gen(env, IsWaitHandle, maybeChild));

  auto const childIsSWH =
    maybeChild->type() <= Type::SubObj(c_StaticWaitHandle::classof());
  auto const child = gen(env, AssertType, TAwaitable, maybeChild);

  auto const state = gen(env, LdWHState, child);
  assertx(c_Awaitable::STATE_SUCCEEDED == 0);
  assertx(c_Awaitable::STATE_FAILED == 1);

  if (childIsSWH || !likelySuspended(child)) {
    ifThenElse(env,
      [&] (Block* taken) { gen(env, JmpNZero, taken, state); },
      [&] { implAwaitSucceeded(env, child); },
      [&] {
        if (childIsSWH) return implAwaitFailed(env, child, exitSlow);
        ifThenElse(env,
          [&] (Block* taken) {
            gen(env, JmpZero, taken, gen(env, EqInt, state, cns(env, 1)));
          },
          [&] { implAwaitFailed(env, child, exitSlow); },
          [&] { handleNotFinished(child, exitSlow); }
        );
      }
    );
  } else {
    ifThenElse(env,
      [&] (Block* taken) {
        gen(env, JmpNZero, taken, gen(env, LteInt, state, cns(env, 1)));
      },
      [&] { handleNotFinished(child, exitSlow); },
      [&] {
        // Coming from a call with request for async eager return that did
        // not return eagerly.
        hint(env, Block::Hint::Unlikely);
        IRUnit::Hinter h(env.irb->unit(), Block::Hint::Unlikely);

        ifThenElse(env,
          [&] (Block* taken) { gen(env, JmpNZero, taken, state); },
          [&] {
            implAwaitSucceeded(env, child);
            gen(env, Jmp, makeExit(env, nextSrcKey(env)));
          },
          [&] { implAwaitFailed(env, child, exitSlow); }
        );
      }
    );
  }
}

//////////////////////////////////////////////////////////////////////

}

void emitWHResult(IRGS& env) {
  implAwait(env, [&] (SSATmp*, Block* exit) {
    gen(env, Jmp, exit);
  });
}

void emitAwait(IRGS& env) {
  assertx(curFunc(env)->isAsync());
  assertx(spOffBCFromStackBase(env) == spOffEmpty(env) + 1);

  implAwait(env, [&] (SSATmp* child, Block* exit) {
    // Work in progress: fast path not supported yet
    if (curFunc(env)->isAsyncGenerator() &&
        resumeMode(env) == ResumeMode::Async) {
      gen(env, Jmp, exit);
      return;
    }

    popC(env);
    updateMarker(env);
    env.irb->exceptionStackBoundary();

    if (resumeMode(env) == ResumeMode::Async) {
      implAwaitR(env, child, bcOff(env), nextBcOff(env));
    } else {
      implAwaitE(env, child, bcOff(env), nextBcOff(env));
    }
  });
}

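// AwaitAll: count the unfinished wait handles in the given range of locals;
// if all are already finished push null, otherwise create an
// AwaitAllWaitHandle and suspend on it.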
void emitAwaitAll(IRGS& env, LocalRange locals) {
  auto const suspendOffset = bcOff(env);
  auto const resumeOffset = nextBcOff(env);
  assertx(curFunc(env)->isAsync());
  assertx(spOffBCFromStackBase(env) == spOffEmpty(env));

  // Work in progress: fast path not supported yet
  auto const suspendInJitNotSupported =
    curFunc(env)->isAsyncGenerator() && resumeMode(env) == ResumeMode::Async;
  auto const exitSlow = makeExitSlow(env);

  auto const cnt = [&] {
    if (locals.count > RuntimeOption::EvalJitMaxAwaitAllUnroll) {
      return gen(
        env,
        CountWHNotDone,
        CountWHNotDoneData { locals.first, locals.count },
        exitSlow,
        fp(env)
      );
    }
    auto cnt = cns(env, 0);
    for (int i = 0; i < locals.count; ++i) {
      auto const loc = ldLoc(env, locals.first + i, DataTypeSpecific);
      if (loc->isA(TNull)) continue;
      if (!loc->isA(TObj)) PUNT(Await-NonObject);
      gen(env, JmpZero, exitSlow, gen(env, IsWaitHandle, loc));
      auto const notDone = gen(env, LdWHNotDone, loc);
      if (suspendInJitNotSupported) {
        gen(env, JmpNZero, exitSlow, notDone);
      } else {
        cnt = gen(env, AddInt, cnt, notDone);
      }
    }
    return cnt;
  }();

  if (suspendInJitNotSupported) {
    // Side-exit if CountWHNotDone was non-zero.
    gen(env, JmpNZero, exitSlow, cnt);
    push(env, cns(env, TInitNull));
    return;
  }

  ifThenElse(
    env,
    [&] (Block* taken) {
      gen(env, JmpNZero, taken, cnt);
    },
    [&] { // Next: all of the wait handles are finished
      push(env, cns(env, TInitNull));
    },
    [&] { // Taken: some of the wait handles have not yet completed
      hint(env, Block::Hint::Unlikely);
      IRUnit::Hinter h(env.irb->unit(), Block::Hint::Unlikely);

      auto const wh = gen(
        env,
        CreateAAWH,
        CreateAAWHData { locals.first, locals.count },
        fp(env),
        cnt
      );

      auto const state = gen(env, LdWHState, wh);
      ifThenElse(
        env,
        [&] (Block* taken) {
          gen(env, JmpNZero, taken, state);
        },
        [&] { // Extremely unlikely: profiling hook finished the AAWH.
          hint(env, Block::Hint::Unused);
          push(env, cns(env, TInitNull));
        },
        [&] {
          if (resumeMode(env) == ResumeMode::Async) {
            implAwaitR(env, wh, suspendOffset, resumeOffset);
          } else {
            implAwaitE(env, wh, suspendOffset, resumeOffset);
          }
        }
      );
    }
  );
}

//////////////////////////////////////////////////////////////////////

void emitCreateCont(IRGS& env) {
  auto const suspendOffset = bcOff(env);
  auto const resumeOffset = nextBcOff(env);
  assertx(resumeMode(env) == ResumeMode::None);
  assertx(curFunc(env)->isGenerator());
  assertx(spOffBCFromStackBase(env) == spOffEmpty(env));
  assertx(!isInlining(env));

  // Create the Generator object. CreateCont takes care of copying local
  // variables and iterators.
  auto const func = curFunc(env);
  auto const resumeSk = SrcKey(func, resumeOffset, ResumeMode::GenIter);
  auto const bindData = LdBindAddrData { resumeSk, spOffEmpty(env) + 1 };
  auto const resumeAddr = gen(env, LdBindAddr, bindData);
  auto const cont =
    gen(env,
        curFunc(env)->isAsync() ? CreateAGen : CreateGen,
        fp(env),
        cns(env, func->numSlotsInFrame()),
        resumeAddr,
        cns(env, suspendOffset));

  // The suspend hook will decref the newly created generator if it throws.
  auto const contAR =
    gen(env,
        LdContActRec,
        IsAsyncData(curFunc(env)->isAsync()),
        cont);

  suspendHook(env, [&] {
    gen(env, SuspendHookCreateCont, fp(env), contAR, cont);
  });

  // Grab caller info from the ActRec, free the ActRec, and return control to
  // the caller.
  auto const spAdjust = offsetToReturnSlot(env);
  auto const retData = RetCtrlData { spAdjust, false, AuxUnion{0} };
  gen(env, RetCtrl, retData, sp(env), fp(env), cont);
}

void emitContEnter(IRGS& env) {
  assertx(curClass(env));
  assertx(curClass(env)->classof(AsyncGenerator::getClass()) ||
          curClass(env)->classof(Generator::getClass()));

  auto const callBCOffset = bcOff(env);
  auto const isAsync = curClass(env)->classof(AsyncGenerator::getClass());
  // Load generator's FP and resume address.
  auto const genObj = ldThis(env);
  auto const genFp = gen(env, LdContActRec, IsAsyncData(isAsync), genObj);
  auto resumeAddr = gen(env, LdContResumeAddr, IsAsyncData(isAsync), genObj);

  // Make sure function enter hook is called if needed.
  auto const exitSlow = makeExitSlow(env);
  gen(env, CheckSurpriseFlags, exitSlow, anyStackRegister(env));

  // Exit to interpreter if resume address is not known.
  resumeAddr = gen(env, CheckNonNull, exitSlow, resumeAddr);

  // Set state from Started to Running.
  auto const gs = GeneratorState { BaseGenerator::State::Running };
  gen(env, StContArState, gs, genFp);

  auto const sendVal = popC(env, DataTypeGeneric);
  updateMarker(env);
  env.irb->exceptionStackBoundary();

  auto const retVal = gen(
    env,
    ContEnter,
    ContEnterData { spOffBCFromIRSP(env), callBCOffset, isAsync },
    sp(env),
    fp(env),
    genFp,
    resumeAddr,
    sendVal
  );

  push(env, retVal);
}

void emitYield(IRGS& env) {
  implYield(env, false);
}

void emitYieldK(IRGS& env) {
  implYield(env, true);
}

void emitContCheck(IRGS& env, ContCheckOp subop) {
  assertx(curClass(env));
  assertx(curClass(env)->classof(AsyncGenerator::getClass()) ||
          curClass(env)->classof(Generator::getClass()));
  auto const cont = ldThis(env);
  auto const checkStarted = subop == ContCheckOp::CheckStarted;
  gen(env, ContCheckNext,
      IsAsyncData(curClass(env)->classof(AsyncGenerator::getClass())),
      makeExitSlow(env), cont, cns(env, checkStarted));
}

void emitContValid(IRGS& env) {
  assertx(curClass(env));
  assertx(curClass(env)->classof(AsyncGenerator::getClass()) ||
          curClass(env)->classof(Generator::getClass()));
  auto const cont = ldThis(env);
  push(env, gen(env, ContValid,
    IsAsyncData(curClass(env)->classof(AsyncGenerator::getClass())), cont));
}

void emitContKey(IRGS& env) { PUNT(ContKey); }
void emitContRaise(IRGS& env) { PUNT(ContRaise); }
void emitContCurrent(IRGS& env) { PUNT(ContCurrent); }

//////////////////////////////////////////////////////////////////////