Add sub-controls for Hack array compat runtime checks
[hiphop-php.git] / hphp / runtime / vm / jit / irgen-call.cpp
blobdc07da9ed4cd806eb663ab9b3a514321616d28d0
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
16 #include "hphp/runtime/vm/jit/irgen-call.h"
18 #include "hphp/runtime/base/stats.h"
19 #include "hphp/runtime/vm/runtime.h"
21 #include "hphp/runtime/vm/jit/meth-profile.h"
22 #include "hphp/runtime/vm/jit/normalized-instruction.h"
23 #include "hphp/runtime/vm/jit/target-profile.h"
24 #include "hphp/runtime/vm/jit/type-constraint.h"
25 #include "hphp/runtime/vm/jit/type.h"
27 #include "hphp/runtime/vm/jit/irgen-builtin.h"
28 #include "hphp/runtime/vm/jit/irgen-create.h"
29 #include "hphp/runtime/vm/jit/irgen-exit.h"
30 #include "hphp/runtime/vm/jit/irgen-internal.h"
31 #include "hphp/runtime/vm/jit/irgen-types.h"
33 namespace HPHP { namespace jit { namespace irgen {
35 namespace {
37 //////////////////////////////////////////////////////////////////////
39 const StaticString s_self("self");
40 const StaticString s_parent("parent");
41 const StaticString s_static("static");
43 //////////////////////////////////////////////////////////////////////
45 const Func*
46 findCuf(Op /*op*/, SSATmp* callable, const Func* ctxFunc, const Class*& cls,
47 StringData*& invName, bool& forward, bool& needsUnitLoad) {
48 cls = nullptr;
49 invName = nullptr;
50 needsUnitLoad = false;
52 const StringData* str =
53 callable->hasConstVal(TStr) ? callable->strVal() : nullptr;
54 auto arr = [&]() -> const ArrayData* {
55 if (callable->hasConstVal(TArr)) return callable->arrVal();
56 if (callable->hasConstVal(TVec)) return callable->vecVal();
57 return nullptr;
58 }();
60 StringData* sclass = nullptr;
61 StringData* sname = nullptr;
62 if (str) {
63 auto const lookup = lookupImmutableFunc(ctxFunc->unit(), str);
64 if (lookup.func) {
65 needsUnitLoad = lookup.needsUnitLoad;
66 return lookup.func;
68 String name(const_cast<StringData*>(str));
69 int pos = name.find("::");
70 if (pos <= 0 || pos + 2 >= name.size() ||
71 name.find("::", pos + 2) != String::npos) {
72 return nullptr;
74 sclass = makeStaticString(name.substr(0, pos).get());
75 sname = makeStaticString(name.substr(pos + 2).get());
76 } else if (arr) {
77 if (arr->size() != 2) return nullptr;
78 auto const e0 = arr->get(int64_t(0), false).unboxed();
79 auto const e1 = arr->get(int64_t(1), false).unboxed();
80 if (!isStringType(e0.type()) || !isStringType(e1.type())) return nullptr;
81 sclass = e0.val().pstr;
82 sname = e1.val().pstr;
83 String name(sname);
84 if (name.find("::") != String::npos) return nullptr;
85 } else {
86 return nullptr;
89 auto ctx = ctxFunc->cls();
90 bool isExact = true;
91 if (sclass->isame(s_self.get())) {
92 if (!ctx) return nullptr;
93 cls = ctx;
94 forward = true;
95 } else if (sclass->isame(s_parent.get())) {
96 if (!ctx || !ctx->parent()) return nullptr;
97 cls = ctx->parent();
98 forward = true;
99 } else if (sclass->isame(s_static.get())) {
100 if (!ctx) return nullptr;
101 cls = ctx;
102 isExact = false;
103 } else {
104 cls = Unit::lookupUniqueClassInContext(sclass, ctx);
105 if (!cls) return nullptr;
108 bool magicCall = false;
109 const Func* f = lookupImmutableMethod(
110 cls, sname, magicCall, /* staticLookup = */ true, ctxFunc, isExact);
111 if (!f || (!isExact && !f->isImmutableFrom(cls))) return nullptr;
112 if (forward && !ctx->classof(f->cls())) {
114 * To preserve the invariant that the lsb class
115 * is an instance of the context class, we require
116 * that f's class is an instance of the context class.
117 * This is conservative, but without it, we would need
118 * a runtime check to decide whether or not to forward
119 * the lsb class
121 return nullptr;
123 if (magicCall) invName = sname;
124 return f;
127 bool canInstantiateClass(const Class* cls) {
128 return cls && isNormalClass(cls) && !isAbstract(cls);
131 //////////////////////////////////////////////////////////////////////
133 // Pushing for object method when we don't know the Func* statically.
134 void fpushObjMethodUnknown(IRGS& env,
135 SSATmp* obj,
136 const StringData* methodName,
137 uint32_t numParams,
138 bool shouldFatal) {
139 emitIncStat(env, Stats::ObjMethod_cached, 1);
140 fpushActRec(env,
141 cns(env, TNullptr), // Will be set by LdObjMethod
142 obj,
143 numParams,
144 nullptr,
145 cns(env, false));
146 auto const objCls = gen(env, LdObjClass, obj);
148 // This is special. We need to move the stackpointer in case LdObjMethod
149 // calls a destructor. Otherwise it would clobber the ActRec we just pushed.
150 updateMarker(env);
151 env.irb->exceptionStackBoundary();
153 gen(env,
154 LdObjMethod,
155 LdObjMethodData {
156 spOffBCFromIRSP(env), methodName, shouldFatal
158 objCls,
159 sp(env));
163 * Looks for a Func named methodName in iface, or any of the interfaces it
164 * implements. returns nullptr if none was found, or if its interface's
165 * vtableSlot is kInvalidSlot.
167 const Func* findInterfaceMethod(const Class* iface,
168 const StringData* methodName) {
170 auto checkOneInterface = [methodName](const Class* i) -> const Func* {
171 if (i->preClass()->ifaceVtableSlot() == kInvalidSlot) return nullptr;
173 const Func* func = i->lookupMethod(methodName);
174 always_assert(!func || func->cls() == i);
175 return func;
178 if (auto const func = checkOneInterface(iface)) return func;
180 for (auto pface : iface->allInterfaces().range()) {
181 if (auto const func = checkOneInterface(pface)) {
182 return func;
186 return nullptr;
189 void fpushObjMethodExactFunc(
190 IRGS& env,
191 SSATmp* obj,
192 const Class* exactClass,
193 const Func* func,
194 const StringData* methodName,
195 uint32_t numParams
198 * lookupImmutableMethod will return Funcs from AttrUnique classes, but in
199 * this case, we have an object, so there's no need to check that the class
200 * exists.
202 * Static function: store base class into this slot instead of obj and decref
203 * the obj that was pushed as the this pointer since the obj won't be in the
204 * actrec and thus MethodCache::lookup won't decref it.
206 * Static closure body: we still need to pass the object instance for the
207 * closure prologue to properly do its dispatch (and extract use vars). It
208 * will decref it and put the class on the actrec before entering the "real"
209 * cloned closure body.
211 SSATmp* objOrCls = obj;
212 emitIncStat(env, Stats::ObjMethod_known, 1);
213 if (func->isStaticInPrologue()) {
214 objOrCls = exactClass ? cns(env, exactClass) : gen(env, LdObjClass, obj);
215 decRef(env, obj);
217 fpushActRec(
218 env,
219 cns(env, func),
220 objOrCls,
221 numParams,
222 methodName,
223 cns(env, false)
227 const Func*
228 lookupInterfaceFuncForFPushObjMethod(IRGS& /*env*/, const Class* baseClass,
229 const StringData* methodName) {
230 if (!baseClass) return nullptr;
231 if (!classIsUniqueInterface(baseClass)) return nullptr;
233 return findInterfaceMethod(baseClass, methodName);
236 void fpushObjMethodInterfaceFunc(
237 IRGS& env,
238 SSATmp* obj,
239 const Func* ifaceFunc,
240 int32_t numParams
242 auto const vtableSlot = ifaceFunc->cls()->preClass()->ifaceVtableSlot();
244 emitIncStat(env, Stats::ObjMethod_ifaceslot, 1);
245 auto cls = gen(env, LdObjClass, obj);
246 auto func = gen(env, LdIfaceMethod,
247 IfaceMethodData{vtableSlot, ifaceFunc->methodSlot()},
248 cls);
249 SSATmp* objOrCls = obj;
250 if (ifaceFunc->attrs() & AttrStatic) {
251 decRef(env, obj);
252 objOrCls = cls;
254 fpushActRec(
255 env,
256 func,
257 objOrCls,
258 numParams,
259 /* invName */nullptr,
260 cns(env, false)
262 return;
265 void fpushObjMethodNonExactFunc(IRGS& env, SSATmp* obj,
266 const Class* /*baseClass*/, const Func* func,
267 uint32_t numParams) {
268 emitIncStat(env, Stats::ObjMethod_methodslot, 1);
269 auto const clsTmp = gen(env, LdObjClass, obj);
270 auto const funcTmp = gen(
271 env,
272 LdClsMethod,
273 clsTmp,
274 cns(env, -(func->methodSlot() + 1))
276 SSATmp* objOrCls = obj;
277 if (func->isStaticInPrologue()) {
278 decRef(env, obj);
279 objOrCls = clsTmp;
281 fpushActRec(
282 env,
283 funcTmp,
284 objOrCls,
285 numParams,
286 /* invName */nullptr,
287 cns(env, false)
291 void fpushObjMethodWithBaseClass(
292 IRGS& env,
293 SSATmp* obj,
294 const Class* baseClass,
295 const StringData* methodName,
296 uint32_t numParams,
297 bool shouldFatal,
298 bool exactClass
300 bool magicCall = false;
301 if (auto const func = lookupImmutableMethod(
302 baseClass, methodName, magicCall,
303 /* staticLookup: */ false, curFunc(env), exactClass)) {
304 if (exactClass ||
305 func->attrs() & AttrPrivate ||
306 func->isImmutableFrom(baseClass)) {
307 fpushObjMethodExactFunc(env, obj,
308 exactClass ? baseClass : nullptr,
309 func,
310 magicCall ? methodName : nullptr,
311 numParams);
312 return;
314 fpushObjMethodNonExactFunc(env, obj, baseClass, func, numParams);
315 return;
318 if (auto const func =
319 lookupInterfaceFuncForFPushObjMethod(env, baseClass, methodName)) {
320 fpushObjMethodInterfaceFunc(env, obj, func, numParams);
321 return;
324 fpushObjMethodUnknown(env, obj, methodName, numParams, shouldFatal);
327 const StaticString methProfileKey{ "MethProfile-FPushObjMethod" };
329 inline SSATmp* ldCtxForClsMethod(IRGS& env,
330 const Func* callee,
331 SSATmp* callCtx,
332 const Class* cls,
333 bool exact) {
335 assertx(callCtx->isA(TCls));
337 auto gen_missing_this = [&] {
338 if (needs_missing_this_check(callee)) {
339 gen(env, RaiseMissingThis, cns(env, callee));
341 return callCtx;
344 if (callee->isStatic()) return callCtx;
345 if (!hasThis(env)) {
346 return gen_missing_this();
349 auto const maybeUseThis = curClass(env)->classof(cls);
350 if (!maybeUseThis && !cls->classof(curClass(env))) {
351 return gen_missing_this();
354 auto skipAT = [] (SSATmp* val) {
355 while (val->inst()->is(AssertType, CheckType, CheckCtxThis)) {
356 val = val->inst()->src(0);
358 return val;
361 auto const canUseThis = [&] () -> bool {
362 // A static::foo() call can always pass through a $this
363 // from the caller (if it has one). Match the common patterns
364 auto cc = skipAT(callCtx);
365 if (cc->inst()->is(LdObjClass, LdClsCtx, LdClsCctx)) {
366 cc = skipAT(cc->inst()->src(0));
367 if (cc->inst()->is(LdCtx, LdCctx)) return true;
369 return maybeUseThis && (exact || cls->attrs() & AttrNoOverride);
370 }();
372 auto const ctx = gen(env, LdCtx, fp(env));
373 auto thiz = castCtxThis(env, ctx);
375 if (canUseThis) {
376 gen(env, IncRef, thiz);
377 return thiz;
380 return cond(
381 env,
382 [&] (Block* taken) {
383 auto thizCls = gen(env, LdObjClass, thiz);
384 auto flag = exact ?
385 gen(env, ExtendsClass, ExtendsClassData{ cls, true }, thizCls) :
386 gen(env, InstanceOf, thizCls, callCtx);
387 gen(env, JmpZero, taken, flag);
389 [&] {
390 gen(env, IncRef, thiz);
391 return thiz;
393 [&] {
394 hint(env, Block::Hint::Unlikely);
395 gen_missing_this();
396 return gen(env, ConvClsToCctx, callCtx);
400 bool optimizeProfiledPushMethod(IRGS& env,
401 TargetProfile<MethProfile>& profile,
402 SSATmp* objOrCls,
403 Block* sideExit,
404 const StringData* methodName,
405 uint32_t numParams,
406 bool dynamic) {
407 if (!profile.optimizing()) return false;
408 if (env.transFlags.noProfiledFPush && env.firstBcInst) return false;
410 always_assert(objOrCls->type().subtypeOfAny(TObj, TCls));
412 auto isStaticCall = objOrCls->type() <= TCls;
414 auto getCtx = [&](const Func* callee,
415 SSATmp* ctx,
416 const Class* cls) -> SSATmp* {
417 if (isStaticCall) {
418 return ldCtxForClsMethod(env, callee, ctx,
419 cls ? cls : callee->cls(), cls != nullptr);
421 if (!callee->isStatic()) return ctx;
422 assertx(ctx->type() <= TObj);
423 auto ret = cls ? cns(env, cls) : gen(env, LdObjClass, ctx);
424 decRef(env, ctx);
425 return ret;
428 MethProfile data = profile.data(MethProfile::reduce);
430 if (auto const uniqueMeth = data.uniqueMeth()) {
431 bool isMagic = !uniqueMeth->name()->isame(methodName);
432 if (auto const uniqueClass = data.uniqueClass()) {
433 // Profiling saw a unique class.
434 // Check for it, then burn in the func
435 auto const refined = gen(env, CheckType,
436 isStaticCall ?
437 Type::ExactCls(uniqueClass) :
438 Type::ExactObj(uniqueClass),
439 sideExit, objOrCls);
440 env.irb->constrainValue(refined, TypeConstraint(uniqueClass));
441 auto const ctx = getCtx(uniqueMeth, refined, uniqueClass);
442 fpushActRec(env, cns(env, uniqueMeth), ctx, numParams,
443 isMagic ? methodName : nullptr, cns(env, dynamic));
444 return true;
447 if (isMagic) return false;
449 // Although there were multiple classes, the method was unique
450 // (this comes up eg for a final method in a base class). But
451 // note that we can't allow a magic call here since it's possible
452 // that an as-yet-unseen derived class defines a method named
453 // methodName.
454 auto const slot = cns(env, uniqueMeth->methodSlot());
455 auto const negSlot = cns(env, -1 - uniqueMeth->methodSlot());
456 auto const ctx = getCtx(uniqueMeth, objOrCls, nullptr);
457 auto const cls = isStaticCall ? objOrCls : gen(env, LdObjClass, objOrCls);
458 auto const len = gen(env, LdFuncVecLen, cls);
459 auto const cmp = gen(env, LteInt, len, slot);
460 gen(env, JmpNZero, sideExit, cmp);
461 auto const meth = gen(env, LdClsMethod, cls, negSlot);
462 auto const same = gen(env, EqFunc, meth, cns(env, uniqueMeth));
463 gen(env, JmpZero, sideExit, same);
464 fpushActRec(
465 env,
466 cns(env, uniqueMeth),
467 ctx,
468 numParams,
469 nullptr,
470 cns(env, dynamic)
472 return true;
475 if (auto const baseMeth = data.baseMeth()) {
476 if (!baseMeth->name()->isame(methodName)) {
477 return false;
480 // The method was defined in a common base class. We just need to
481 // check for an instance of the class, and then use the method
482 // from the right slot.
483 auto const ctx = getCtx(baseMeth, objOrCls, nullptr);
484 auto const cls = isStaticCall ? objOrCls : gen(env, LdObjClass, objOrCls);
485 auto flag = gen(env, ExtendsClass,
486 ExtendsClassData{baseMeth->cls(), true}, cls);
487 gen(env, JmpZero, sideExit, flag);
488 auto negSlot = cns(env, -1 - baseMeth->methodSlot());
489 auto meth = gen(env, LdClsMethod, cls, negSlot);
490 fpushActRec(
491 env,
492 meth,
493 ctx,
494 numParams,
495 nullptr,
496 cns(env, dynamic)
498 return true;
501 if (auto const intfMeth = data.interfaceMeth()) {
502 if (!intfMeth->name()->isame(methodName)) {
503 return false;
505 // The method was defined in a common interface
506 auto const ctx = getCtx(intfMeth, objOrCls, nullptr);
507 auto const cls = isStaticCall ? objOrCls : gen(env, LdObjClass, objOrCls);
508 auto flag = gen(env, InstanceOfIfaceVtable,
509 ClassData{intfMeth->cls()}, cls);
510 gen(env, JmpZero, sideExit, flag);
511 auto const vtableSlot =
512 intfMeth->cls()->preClass()->ifaceVtableSlot();
513 auto meth = gen(env, LdIfaceMethod,
514 IfaceMethodData{vtableSlot, intfMeth->methodSlot()},
515 cls);
516 fpushActRec(env, meth, ctx, numParams, nullptr, cns(env, dynamic));
517 return true;
520 return false;
523 void fpushObjMethod(IRGS& env,
524 SSATmp* obj,
525 const StringData* methodName,
526 uint32_t numParams,
527 bool shouldFatal,
528 Block* sideExit) {
529 emitIncStat(env, Stats::ObjMethod_total, 1);
531 assertx(obj->type() <= TObj);
532 if (auto cls = obj->type().clsSpec().cls()) {
533 if (!env.irb->constrainValue(obj, TypeConstraint(cls).setWeak())) {
534 // If we know the class without having to specialize a guard any further,
535 // use it.
536 fpushObjMethodWithBaseClass(
537 env, obj, cls, methodName, numParams, shouldFatal,
538 obj->type().clsSpec().exact() || cls->attrs() & AttrNoOverride);
539 return;
543 folly::Optional<TargetProfile<MethProfile>> profile;
544 if (RuntimeOption::RepoAuthoritative) {
545 profile.emplace(env.context, env.irb->curMarker(), methProfileKey.get());
547 if (optimizeProfiledPushMethod(env, *profile, obj, sideExit,
548 methodName, numParams, false)) {
549 return;
553 fpushObjMethodWithBaseClass(env, obj, nullptr, methodName, numParams,
554 shouldFatal, false);
556 if (profile && profile->profiling()) {
557 gen(env,
558 ProfileMethod,
559 ProfileMethodData {
560 spOffBCFromIRSP(env), profile->handle()
562 sp(env),
563 cns(env, TNullptr));
567 void fpushFuncObj(IRGS& env, uint32_t numParams) {
568 auto const slowExit = makeExitSlow(env);
569 auto const obj = popC(env);
570 auto const cls = gen(env, LdObjClass, obj);
571 auto const func = gen(env, LdObjInvoke, slowExit, cls);
572 fpushActRec(env, func, obj, numParams, nullptr, cns(env, false));
575 void fpushFuncArr(IRGS& env, uint32_t numParams) {
576 auto const thisAR = fp(env);
578 auto const arr = popC(env);
579 fpushActRec(
580 env,
581 cns(env, TNullptr),
582 cns(env, TNullptr),
583 numParams,
584 nullptr,
585 cns(env, true)
588 // This is special. We need to move the stackpointer incase LdArrFuncCtx
589 // calls a destructor. Otherwise it would clobber the ActRec we just
590 // pushed.
591 updateMarker(env);
592 env.irb->exceptionStackBoundary();
594 gen(env, LdArrFuncCtx,
595 IRSPRelOffsetData { spOffBCFromIRSP(env) },
596 arr, sp(env), thisAR);
597 decRef(env, arr);
600 // FPushCuf when the callee is not known at compile time.
601 void fpushCufUnknown(IRGS& env, Op op, uint32_t numParams) {
602 if (op != Op::FPushCuf) {
603 PUNT(fpushCufUnknown-nonFPushCuf);
606 if (topC(env)->isA(TObj)) return fpushFuncObj(env, numParams);
608 if (!topC(env)->type().subtypeOfAny(TArr, TVec, TStr)) {
609 PUNT(fpushCufUnknown);
612 auto const callable = popC(env);
613 fpushActRec(
614 env,
615 cns(env, TNullptr),
616 cns(env, TNullptr),
617 numParams,
618 nullptr,
619 cns(env, true)
623 * This is a similar case to lookup for functions in FPushFunc or
624 * FPushObjMethod. We can throw in a weird situation where the
625 * ActRec is already on the stack, but this bytecode isn't done
626 * executing yet. See arPreliveOverwriteCells for details about why
627 * we need this marker.
629 updateMarker(env);
630 env.irb->exceptionStackBoundary();
632 auto const opcode =
633 callable->type().subtypeOfAny(TArr, TVec)
634 ? LdArrFPushCuf
635 : LdStrFPushCuf;
636 gen(env, opcode,
637 IRSPRelOffsetData { spOffBCFromIRSP(env) },
638 callable, sp(env), fp(env));
639 decRef(env, callable);
642 SSATmp* forwardCtx(IRGS& env, SSATmp* ctx, SSATmp* funcTmp) {
643 assertx(ctx->type() <= TCtx);
644 assertx(funcTmp->type() <= TFunc);
646 auto forwardDynamicCallee = [&] {
647 if (!hasThis(env)) {
648 gen(env, RaiseMissingThis, funcTmp);
649 return ctx;
652 auto const obj = castCtxThis(env, ctx);
653 gen(env, IncRef, obj);
654 return obj;
657 if (funcTmp->hasConstVal()) {
658 assertx(!funcTmp->funcVal()->isClosureBody());
659 if (funcTmp->funcVal()->isStatic()) {
660 return gen(env, FwdCtxStaticCall, ctx);
661 } else {
662 return forwardDynamicCallee();
666 return cond(env,
667 [&](Block* target) {
668 gen(env, CheckFuncStatic, target, funcTmp);
670 forwardDynamicCallee,
671 [&] {
672 return gen(env, FwdCtxStaticCall, ctx);
676 void implFPushCufOp(IRGS& env, Op op, uint32_t numArgs) {
677 const bool safe = op == OpFPushCufSafe;
678 bool forward = op == OpFPushCufF;
679 SSATmp* callable = topC(env, BCSPRelOffset{safe ? 1 : 0});
681 const Class* cls = nullptr;
682 StringData* invName = nullptr;
683 bool needsUnitLoad = false;
684 auto const callee = findCuf(op, callable, curFunc(env), cls, invName,
685 forward, needsUnitLoad);
686 if (!callee) return fpushCufUnknown(env, op, numArgs);
688 SSATmp* ctx;
689 auto const safeFlag = cns(env, true); // This is always true until the slow
690 // exits below are implemented
691 auto func = cns(env, callee);
692 if (cls) {
693 auto const exitSlow = makeExitSlow(env);
694 if (!classIsPersistentOrCtxParent(env, cls)) {
695 // The miss path is complicated and rare. Punt for now. This must be
696 // checked before we IncRef the context below, because the slow exit will
697 // want to do that same IncRef via InterpOne.
698 gen(env, LdClsCachedSafe, exitSlow, cns(env, cls->name()));
701 if (forward) {
702 ctx = forwardCtx(env, ldCtx(env), cns(env, callee));
703 } else {
704 ctx = ldCtxForClsMethod(env, callee, cns(env, cls), cls, true);
706 } else {
707 ctx = cns(env, TNullptr);
708 if (needsUnitLoad) {
709 // Ensure the function's unit is loaded. The miss path is complicated and
710 // rare. Punt for now.
711 gen(
712 env,
713 LdFuncCachedSafe,
714 LdFuncCachedData { callee->name() },
715 makeExitSlow(env)
720 auto const defaultVal = safe ? popC(env) : nullptr;
721 popDecRef(env); // callable
722 if (safe) {
723 push(env, defaultVal);
724 push(env, safeFlag);
727 fpushActRec(
728 env,
729 func,
730 ctx,
731 numArgs,
732 invName,
733 cns(env, !callable->isA(TObj))
737 void fpushFuncCommon(IRGS& env,
738 uint32_t numParams,
739 const StringData* name,
740 const StringData* fallback) {
742 auto const lookup = lookupImmutableFunc(curUnit(env), name);
743 if (lookup.func) {
744 // We know the function, but we have to ensure its unit is loaded. Use
745 // LdFuncCached, ignoring the result to ensure this.
746 if (lookup.needsUnitLoad) gen(env, LdFuncCached, LdFuncCachedData { name });
747 fpushActRec(env,
748 cns(env, lookup.func),
749 cns(env, TNullptr),
750 numParams,
751 nullptr,
752 cns(env, false));
753 return;
756 auto const ssaFunc = fallback
757 ? gen(env, LdFuncCachedU, LdFuncCachedUData { name, fallback })
758 : gen(env, LdFuncCached, LdFuncCachedData { name });
759 fpushActRec(env,
760 ssaFunc,
761 cns(env, TNullptr),
762 numParams,
763 nullptr,
764 cns(env, false));
767 void implUnboxR(IRGS& env) {
768 auto const exit = makeExit(env);
769 auto const srcBox = popR(env);
770 auto const unboxed = unbox(env, srcBox, exit);
771 if (unboxed == srcBox) {
772 // If the Unbox ended up being a noop, don't bother refcounting
773 push(env, unboxed);
774 } else {
775 pushIncRef(env, unboxed);
776 decRef(env, srcBox);
780 void implBoxR(IRGS& env) {
781 auto const value = pop(env, DataTypeGeneric);
782 auto const boxed = boxHelper(
783 env,
784 gen(env, AssertType, TCell | TBoxedInitCell, value),
785 [] (SSATmp* ) {});
786 push(env, boxed);
790 //////////////////////////////////////////////////////////////////////
792 const StaticString
793 s_http_response_header("http_response_header"),
794 s_php_errormsg("php_errormsg");
797 * Could `inst' access the locals in the environment of `caller' according to
798 * the given predicate?
800 template <typename P>
801 bool callAccessesLocals(const NormalizedInstruction& inst,
802 const Func* caller,
803 P predicate) {
804 // We don't handle these two cases, because we don't compile functions
805 // containing them:
806 assertx(caller->lookupVarId(s_php_errormsg.get()) == -1);
807 assertx(caller->lookupVarId(s_http_response_header.get()) == -1);
809 auto const unit = caller->unit();
811 auto const checkTaintId = [&](Id id) {
812 auto const str = unit->lookupLitstrId(id);
813 // Only builtins can access a caller's locals or be skip-frame.
814 auto const callee = Unit::lookupBuiltin(str);
815 return callee && predicate(callee);
818 if (inst.op() == OpFCallBuiltin) return checkTaintId(inst.imm[2].u_SA);
819 if (!isFCallStar(inst.op())) return false;
821 auto const fpi = caller->findFPI(inst.source.offset());
822 assertx(fpi != nullptr);
823 auto const fpushPC = unit->at(fpi->m_fpushOff);
824 auto const op = peek_op(fpushPC);
826 switch (op) {
827 case OpFPushFunc:
828 case OpFPushCufIter:
829 case OpFPushCuf:
830 case OpFPushCufF:
831 case OpFPushCufSafe:
832 // Dynamic calls. If we've forbidden dynamic calls to functions which
833 // access the caller's frame, we know this can't be one.
834 return !disallowDynamicVarEnvFuncs();
836 case OpFPushFuncD:
837 return checkTaintId(getImm(fpushPC, 1).u_SA);
839 case OpFPushFuncU:
840 return checkTaintId(getImm(fpushPC, 1).u_SA) ||
841 checkTaintId(getImm(fpushPC, 2).u_SA);
843 case OpFPushObjMethod:
844 case OpFPushObjMethodD:
845 case OpFPushClsMethod:
846 case OpFPushClsMethodS:
847 case OpFPushClsMethodSD:
848 case OpFPushClsMethodD:
849 case OpFPushCtor:
850 case OpFPushCtorD:
851 case OpFPushCtorI:
852 case OpFPushCtorS:
853 // None of these access the caller's frame because they all call methods,
854 // not top-level functions. However, they might still be marked as
855 // skip-frame and therefore something they call can affect our frame. We
856 // don't have to worry about this if they're not allowed to call such
857 // functions dynamically.
858 return !disallowDynamicVarEnvFuncs();
860 default:
861 always_assert("Unhandled FPush type in callAccessesLocals" && 0);
866 * Could `inst' write to the locals in the environment of `caller'?
868 * This occurs, e.g., if `inst' is a call to extract().
870 bool callWritesLocals(const NormalizedInstruction& inst,
871 const Func* caller) {
872 return callAccessesLocals(inst, caller, funcWritesLocals);
876 * Could `inst' read from the locals in the environment of `caller'?
878 * This occurs, e.g., if `inst' is a call to compact().
880 bool callReadsLocals(const NormalizedInstruction& inst,
881 const Func* caller) {
882 return callAccessesLocals(inst, caller, funcReadsLocals);
886 * Could `inst' attempt to read the caller frame?
888 * This occurs, e.g., if `inst' is a call to is_callable().
890 bool callNeedsCallerFrame(const NormalizedInstruction& inst,
891 const Func* caller) {
892 auto const unit = caller->unit();
893 auto const checkTaintId = [&](Id id) {
894 auto const str = unit->lookupLitstrId(id);
896 // If the function was invoked dynamically, we can't be sure.
897 if (!str) return true;
899 // Only builtins can inspect the caller frame; we know these are all
900 // loaded ahead of time and unique/persistent.
901 auto const f = Unit::lookupBuiltin(str);
902 return f && funcNeedsCallerFrame(f);
905 if (inst.op() == OpFCallBuiltin) return checkTaintId(inst.imm[2].u_SA);
906 if (!isFCallStar(inst.op())) return false;
908 auto const fpi = caller->findFPI(inst.source.offset());
909 assertx(fpi != nullptr);
910 auto const fpushPC = unit->at(fpi->m_fpushOff);
911 auto const op = peek_op(fpushPC);
913 if (op == OpFPushFunc) return true;
914 if (op == OpFPushFuncD) return checkTaintId(getImm(fpushPC, 1).u_SA);
915 if (op == OpFPushFuncU) {
916 return checkTaintId(getImm(fpushPC, 1).u_SA) ||
917 checkTaintId(getImm(fpushPC, 2).u_SA);
920 return false;
923 //////////////////////////////////////////////////////////////////////
927 //////////////////////////////////////////////////////////////////////
929 void fpushActRec(IRGS& env,
930 SSATmp* func,
931 SSATmp* objOrClass,
932 uint32_t numArgs,
933 const StringData* invName,
934 SSATmp* dynamicCall) {
935 ActRecInfo info;
936 info.spOffset = offsetFromIRSP(
937 env,
938 BCSPRelOffset{-int32_t{kNumActRecCells}}
940 info.numArgs = numArgs;
942 gen(
943 env,
944 SpillFrame,
945 info,
946 sp(env),
947 func,
948 objOrClass,
949 invName ? cns(env, invName) : cns(env, TNullptr),
950 dynamicCall
954 //////////////////////////////////////////////////////////////////////
956 void emitFPushCufIter(IRGS& env, uint32_t numParams, int32_t itId) {
957 auto const func = gen(env, LdCufIterFunc, TFunc, IterId(itId), fp(env));
958 auto const ctx = gen(
959 env,
960 LdCufIterCtx,
961 TCtx | TNullptr,
962 IterId(itId),
963 fp(env)
965 auto const invName = gen(
966 env,
967 LdCufIterInvName,
968 TStr | TNullptr,
969 IterId(itId),
970 fp(env)
972 auto const dynamic = gen(
973 env,
974 LdCufIterDynamic,
975 IterId(itId),
976 fp(env)
979 ActRecInfo info;
980 info.spOffset = offsetFromIRSP(
981 env,
982 BCSPRelOffset{-int32_t{kNumActRecCells}}
984 info.numArgs = numParams;
986 ifNonNull(env, ctx, [&](SSATmp* t) { gen(env, IncRef, t); });
987 ifNonNull(env, invName, [&](SSATmp* t) { gen(env, IncRef, t); });
988 gen(env, SpillFrame, info, sp(env), func, ctx, invName, dynamic);
991 void emitFPushCuf(IRGS& env, uint32_t numArgs) {
992 implFPushCufOp(env, Op::FPushCuf, numArgs);
994 void emitFPushCufF(IRGS& env, uint32_t numArgs) {
995 implFPushCufOp(env, Op::FPushCufF, numArgs);
997 void emitFPushCufSafe(IRGS& env, uint32_t numArgs) {
998 implFPushCufOp(env, Op::FPushCufSafe, numArgs);
1001 void emitFPushCtor(IRGS& env, uint32_t numParams, uint32_t slot) {
1002 auto const cls = takeClsRef(env, slot);
1003 auto const func = gen(env, LdClsCtor, cls, fp(env));
1004 auto const obj = gen(env, AllocObj, cls);
1005 pushIncRef(env, obj);
1006 fpushActRec(env, func, obj, numParams, nullptr, cns(env, true));
1009 void emitFPushCtorD(IRGS& env,
1010 uint32_t numParams,
1011 const StringData* className) {
1012 auto const cls = Unit::lookupUniqueClassInContext(className, curClass(env));
1013 bool const persistentCls = classIsPersistentOrCtxParent(env, cls);
1014 bool const canInstantiate = canInstantiateClass(cls);
1015 bool const fastAlloc =
1016 persistentCls &&
1017 canInstantiate &&
1018 !cls->hasNativePropHandler();
1020 auto const func = lookupImmutableCtor(cls, curClass(env));
1022 // We don't need to actually do the load if we have a persistent class
1023 auto const cachedCls = persistentCls ? nullptr :
1024 gen(env, LdClsCached, cns(env, className));
1026 // If we know the Class*, we can use it; if its not persistent,
1027 // we will have loaded it above.
1028 auto const ssaCls = cls ? cns(env, cls) : cachedCls;
1030 auto const ssaFunc = func ? cns(env, func)
1031 : gen(env, LdClsCtor, ssaCls, fp(env));
1032 auto const obj = fastAlloc ? allocObjFast(env, cls)
1033 : gen(env, AllocObj, ssaCls);
1034 pushIncRef(env, obj);
1035 fpushActRec(env, ssaFunc, obj, numParams, nullptr, cns(env, false));
1038 void emitFPushCtorI(IRGS& env,
1039 uint32_t numParams,
1040 uint32_t clsIx) {
1041 auto const preClass = curFunc(env)->unit()->lookupPreClassId(clsIx);
1042 auto const cls = [&] () -> Class* {
1043 auto const c = preClass->namedEntity()->clsList();
1044 if (c && (c->attrs() & AttrUnique)) return c;
1045 return nullptr;
1046 }();
1047 bool const persistentCls = classIsPersistentOrCtxParent(env, cls);
1048 bool const canInstantiate = canInstantiateClass(cls);
1049 bool const fastAlloc =
1050 persistentCls &&
1051 canInstantiate &&
1052 !cls->hasNativePropHandler();
1054 auto const func = lookupImmutableCtor(cls, curClass(env));
1056 auto const ssaCls = [&] {
1057 if (!persistentCls) {
1058 auto const cachedCls = cond(
1059 env,
1060 [&] (Block* taken) {
1061 return gen(env, LdClsCachedSafe, taken, cns(env, preClass->name()));
1063 [&] (SSATmp* val) {
1064 return val;
1066 [&] {
1067 return gen(env, DefCls, cns(env, clsIx));
1070 if (!cls) return cachedCls;
1072 return cns(env, cls);
1073 }();
1075 auto const ssaFunc = func ? cns(env, func)
1076 : gen(env, LdClsCtor, ssaCls, fp(env));
1077 auto const obj = fastAlloc ? allocObjFast(env, cls)
1078 : gen(env, AllocObj, ssaCls);
1079 pushIncRef(env, obj);
1080 fpushActRec(env, ssaFunc, obj, numParams, nullptr, cns(env, false));
1083 namespace {
// Translate a SpecialClsRef (static/self/parent) into an SSATmp holding the
// corresponding Class.  Punts the tracelet when no context class is
// available (or, for Parent, when there is no parent).
1085 SSATmp* specialClsRefToCls(IRGS& env, SpecialClsRef ref) {
1086 switch (ref) {
1087 case SpecialClsRef::Static:
1088 if (!curClass(env)) PUNT(SpecialClsRef-NoCls);
// "static" is late-bound: load the class from the current context.
1089 return gen(env, LdClsCtx, ldCtx(env));
1090 case SpecialClsRef::Self:
1091 if (auto const clss = curClass(env)) return cns(env, clss);
1092 PUNT(SpecialClsRef-NoCls);
1093 break;
1094 case SpecialClsRef::Parent:
1095 if (auto const clss = curClass(env)) {
1096 if (auto const parent = clss->parent()) return cns(env, parent);
1098 PUNT(SpecialClsRef-NoCls);
1099 break;
1101 always_assert(false);
// FPushCtorS: push an ActRec for a constructor call on a special class
// reference (static/self/parent).  The class is only known at runtime, so
// both the ctor Func and the object are produced by generic IR ops.
1106 void emitFPushCtorS(IRGS& env, uint32_t numParams, SpecialClsRef ref) {
1107 auto const cls = specialClsRefToCls(env, ref);
1108 auto const func = gen(env, LdClsCtor, cls, fp(env));
1109 auto const obj = gen(env, AllocObj, cls);
1110 pushIncRef(env, obj);
1111 fpushActRec(env, func, obj, numParams, nullptr, cns(env, false));
// FPushFuncD: push an ActRec for a direct (named) function call; no
// fallback name.
1114 void emitFPushFuncD(IRGS& env, uint32_t nargs, const StringData* name) {
1115 fpushFuncCommon(env, nargs, name, nullptr);
// FPushFuncU: like FPushFuncD but with a fallback function name to try if
// the primary name does not resolve.
1118 void emitFPushFuncU(IRGS& env,
1119 uint32_t nargs,
1120 const StringData* name,
1121 const StringData* fallback) {
1122 fpushFuncCommon(env, nargs, name, fallback);
// FPushFunc: push an ActRec for a call through a value on the stack.
// Objects and array/vec callables get dedicated paths; otherwise the value
// must be a string naming the function, resolved at runtime via LdFunc.
1125 void emitFPushFunc(IRGS& env, uint32_t numParams, const ImmVector& v) {
// Inout-arg annotations are not handled here; punt.
1126 if (v.size() != 0) PUNT(InOut-FPushFunc);
1128 if (topC(env)->isA(TObj)) return fpushFuncObj(env, numParams);
1129 if (topC(env)->isA(TArr) || topC(env)->isA(TVec)) {
1130 return fpushFuncArr(env, numParams);
1133 if (!topC(env)->isA(TStr)) {
1134 PUNT(FPushFunc_not_Str);
1137 auto const funcName = popC(env);
// Push a placeholder ActRec (null func/ctx); LdFunc fills in the Func*.
// dynamic = true since the callee comes from a runtime string.
1138 fpushActRec(env,
1139 cns(env, TNullptr),
1140 cns(env, TNullptr),
1141 numParams,
1142 nullptr,
1143 cns(env, true));
// The ActRec is live on the stack; keep markers/boundaries accurate in
// case LdFunc throws or re-enters.
1145 updateMarker(env);
1146 env.irb->exceptionStackBoundary();
1148 gen(env, LdFunc,
1149 IRSPRelOffsetData { spOffBCFromIRSP(env) },
1150 funcName, sp(env), fp(env));
1152 decRef(env, funcName);
// FPushObjMethodD: push an ActRec for a method call on the object at the
// top of the stack, with a statically known method name.  For the
// null-safe operator (?->) on a null base, calls SystemLib's null function
// instead.  Non-object, non-null bases punt.
1155 void emitFPushObjMethodD(IRGS& env,
1156 uint32_t numParams,
1157 const StringData* methodName,
1158 ObjMethodOp subop) {
// Side exit used by the profiled-dispatch machinery; the retranslation
// will not re-profile this FPush.
1159 TransFlags trFlags;
1160 trFlags.noProfiledFPush = true;
1161 auto sideExit = makeExit(env, trFlags);
1163 auto const obj = popC(env);
1165 if (obj->type() <= TObj) {
1166 fpushObjMethod(env, obj, methodName, numParams,
1167 true /* shouldFatal */, sideExit);
1168 return;
// ?-> on null: the call is a no-op; route it to s_nullFunc.
1171 if (obj->type() <= TInitNull && subop == ObjMethodOp::NullSafe) {
1172 fpushActRec(
1173 env,
1174 cns(env, SystemLib::s_nullFunc),
1175 cns(env, TNullptr),
1176 numParams,
1177 nullptr,
1178 cns(env, true));
1179 return;
1182 PUNT(FPushObjMethodD-nonObj);
// Try to push an ActRec for a static method call when the base class is
// known at translation time.  Returns false (emitting nothing) when the
// method cannot be resolved immutably; otherwise pushes the ActRec and
// returns true.
//   exact   - baseClass is the exact class (not just an upper bound)
//   check   - emit a class-defined check (requires exact)
//   forward - forward the current calling context ("self::"/"parent::")
//   dynamic - call site used a runtime method name
1185 bool fpushClsMethodKnown(IRGS& env,
1186 uint32_t numParams,
1187 const StringData* methodName,
1188 SSATmp* ctxTmp,
1189 const Class *baseClass,
1190 bool exact,
1191 bool check,
1192 bool forward,
1193 bool dynamic) {
// Set by lookupImmutableMethod when the call resolves to __callStatic-style
// magic dispatch; the method name is then recorded in the ActRec below.
1194 bool magicCall = false;
1195 auto const func = lookupImmutableMethod(baseClass,
1196 methodName,
1197 magicCall,
1198 true /* staticLookup */,
1199 curFunc(env),
1200 exact);
1201 if (!func) return false;
1203 auto const objOrCls = forward ?
1204 ldCtx(env) :
1205 ldCtxForClsMethod(env, func, ctxTmp, baseClass, exact);
// Ensure the class is defined at runtime when it is not persistent.
1206 if (check) {
1207 assertx(exact);
1208 if (!classIsPersistentOrCtxParent(env, baseClass)) {
1209 gen(env, LdClsCached, cns(env, baseClass->name()));
// Func*: constant if immutable from this base; otherwise load from the
// class's negative method-slot table.
1212 auto funcTmp = exact || func->isImmutableFrom(baseClass) ?
1213 cns(env, func) :
1214 gen(env, LdClsMethod, ctxTmp, cns(env, -(func->methodSlot() + 1)));
1216 auto const ctx = forward ?
1217 forwardCtx(env, objOrCls, funcTmp) :
1218 objOrCls;
1219 fpushActRec(env,
1220 funcTmp,
1221 ctx,
1222 numParams,
1223 magicCall ? methodName : nullptr,
1224 cns(env, dynamic));
1225 return true;
// FPushClsMethodD: push an ActRec for Class::method with both names known
// statically.  Tries the fully-resolved path first; otherwise goes through
// the per-callsite class-method target cache, falling back to a slow
// lookup and finally a slow exit.
1228 void emitFPushClsMethodD(IRGS& env,
1229 uint32_t numParams,
1230 const StringData* methodName,
1231 const StringData* className) {
1232 if (auto const baseClass =
1233 Unit::lookupUniqueClassInContext(className, curClass(env))) {
1234 if (fpushClsMethodKnown(env, numParams,
1235 methodName, cns(env, baseClass), baseClass,
1236 true, true, false, false)) {
1237 return;
1241 auto const slowExit = makeExitSlow(env);
1242 auto const ne = NamedEntity::get(className);
1243 auto const data = ClsMethodData { className, methodName, ne };
1245 // Look up the Func* in the targetcache. If it's not there, try the slow
1246 // path. If that fails, slow exit.
1247 auto const func = cond(
1248 env,
1249 [&] (Block* taken) {
1250 return gen(env, LdClsMethodCacheFunc, data, taken);
1252 [&] (SSATmp* func) { // next
1253 emitIncStat(env, Stats::TgtCache_StaticMethodHit, 1);
1254 return func;
1256 [&] { // taken
1257 hint(env, Block::Hint::Unlikely);
1258 auto const result = gen(env, LookupClsMethodCache, data, fp(env));
1259 return gen(env, CheckNonNull, slowExit, result);
// The cache also records the class context to install in the ActRec.
1262 auto const clsCtx = gen(env, LdClsMethodCacheCls, data);
1264 fpushActRec(env,
1265 func,
1266 clsCtx,
1267 numParams,
1268 nullptr,
1269 cns(env, false));
1272 namespace {
// Shared implementation for the FPushClsMethod{,S,SD} family.  The callers
// supply closures for reading the class (peekCls), obtaining the method
// value (getMeth), and releasing the class slot (killCls) so side exits can
// be taken before the slot is consumed.
//   forward - forward the calling context (self::/parent:: semantics)
//   dynamic - method name originates from a runtime value
1274 template <typename Peek, typename Get, typename Kill>
1275 ALWAYS_INLINE void fpushClsMethodCommon(IRGS& env,
1276 uint32_t numParams,
1277 Peek peekCls,
1278 Get getMeth,
1279 Kill killCls,
1280 bool forward,
1281 bool dynamic) {
1282 TransFlags trFlags;
1283 trFlags.noProfiledFPush = true;
1284 auto sideExit = makeExit(env, trFlags);
1286 // We can side-exit, so peek the slot rather than reading from it.
1287 auto const clsVal = peekCls();
1288 auto const methVal = getMeth();
1290 if (!methVal->isA(TStr)) {
1291 PUNT(FPushClsMethod-unknownType);
1294 folly::Optional<TargetProfile<MethProfile>> profile;
// With a constant method name we may resolve the call statically, or at
// least profile the receiver class for a later optimized retranslation.
1296 if (methVal->hasConstVal()) {
1297 auto const methodName = methVal->strVal();
1298 const Class* cls = nullptr;
1299 bool exact = false;
1300 if (auto clsSpec = clsVal->type().clsSpec()) {
1301 cls = clsSpec.cls();
1302 exact = clsSpec.exact();
1305 if (cls) {
1306 if (fpushClsMethodKnown(env, numParams, methodName, clsVal, cls,
1307 exact, false, forward, dynamic)) {
1308 killCls();
1309 return;
// Profiling only makes sense in RepoAuthoritative mode, with a
// non-constant class, and without context forwarding.
1313 if (RuntimeOption::RepoAuthoritative &&
1314 !clsVal->hasConstVal() &&
1315 !forward) {
1316 profile.emplace(env.context, env.irb->curMarker(), methProfileKey.get());
1318 if (optimizeProfiledPushMethod(env, *profile, clsVal, sideExit,
1319 methodName, numParams, dynamic)) {
1320 killCls();
1321 return;
// Generic path: placeholder ActRec, then runtime method lookup.
1326 killCls();
1327 fpushActRec(env,
1328 cns(env, TNullptr),
1329 cns(env, TNullptr),
1330 numParams,
1331 nullptr,
1332 cns(env, dynamic));
1335 * Similar to FPushFunc/FPushObjMethod, we have an incomplete ActRec on the
1336 * stack and must handle that properly if we throw or re-enter.
1338 updateMarker(env);
1339 env.irb->exceptionStackBoundary();
1341 gen(env, LookupClsMethod,
1342 LookupClsMethodData { spOffBCFromIRSP(env), forward },
1343 clsVal, methVal, sp(env), fp(env));
1344 decRef(env, methVal);
// While profiling, record the observed class for this call site.
1346 if (profile && profile->profiling()) {
1347 gen(env,
1348 ProfileMethod,
1349 ProfileMethodData {
1350 spOffBCFromIRSP(env), profile->handle()
1352 sp(env),
1353 clsVal);
// FPushClsMethod: class comes from a class-ref slot, method name from the
// stack.  Always a dynamic call; no context forwarding.
1363 void emitFPushClsMethod(IRGS& env,
1364 uint32_t numParams,
1365 uint32_t slot,
1366 const ImmVector& v) {
1367 if (v.size() != 0) PUNT(InOut-FPushClsMethod);
1368 fpushClsMethodCommon(
1369 env,
1370 numParams,
1371 [&] { return peekClsRef(env, slot); },
1372 [&] { return popC(env); },
// Slot is only killed once we commit (no side exit taken).
1373 [&] { killClsRef(env, slot); },
1374 false,
1375 true
// FPushClsMethodS: class comes from a special class-ref (static/self/
// parent), method name from the stack.  self/parent forward the calling
// context; the call is dynamic.
1379 void emitFPushClsMethodS(IRGS& env,
1380 uint32_t numParams,
1381 SpecialClsRef ref,
1382 const ImmVector& v) {
1383 if (v.size() != 0) PUNT(InOut-FPushClsMethodS);
1384 fpushClsMethodCommon(
1385 env,
1386 numParams,
1387 [&] { return specialClsRefToCls(env, ref); },
1388 [&] { return popC(env); },
// Nothing to kill: the class did not come from a cls-ref slot.
1389 [] {},
1390 ref == SpecialClsRef::Self || ref == SpecialClsRef::Parent,
1391 true
// FPushClsMethodSD: like FPushClsMethodS but the method name is a static
// string immediate, so the call is not dynamic.
1395 void emitFPushClsMethodSD(IRGS& env,
1396 uint32_t numParams,
1397 SpecialClsRef ref,
1398 const StringData* name) {
1399 fpushClsMethodCommon(
1400 env,
1401 numParams,
1402 [&] { return specialClsRefToCls(env, ref); },
1403 [&] { return cns(env, name); },
1404 [] {},
1405 ref == SpecialClsRef::Self || ref == SpecialClsRef::Parent,
1406 false
1406 //////////////////////////////////////////////////////////////////////
// Raise a warning when the call-site parameter-passing annotation (FPassHint)
// disagrees with how the callee actually accepts the parameter (byRef).
// When the callee is known from the FPI stack the warning names it
// statically; otherwise the Func* is loaded from the ActRec at `off` and the
// message is built at runtime.  Gated on EvalWarnOnCallByRefAnnotationMismatch.
1408 void checkFPassHint(IRGS& env, uint32_t paramId, int off, FPassHint hint,
1409 bool byRef) {
1410 if (!RuntimeOption::EvalWarnOnCallByRefAnnotationMismatch) return;
// Return early when the hint matches (or there is no hint).
1411 switch (hint) {
1412 case FPassHint::Any: return;
1413 case FPassHint::Cell:
1414 if (!byRef) return;
1415 break;
1416 case FPassHint::Ref:
1417 if (byRef) return;
1418 break;
// Known callee: build the exact warning text now.
1421 auto const& fpiStack = env.irb->fs().fpiStack();
1422 if (!fpiStack.empty() && fpiStack.back().func) {
1423 auto const str = makeStaticString([&] {
1424 if (hint == FPassHint::Ref) {
1425 return folly::sformat(
1426 "{}() expects parameter {} by value, but the call was "
1427 "annotated with '&'",
1428 fpiStack.back().func->fullName(), paramId + 1);
1430 return folly::sformat(
1431 "{}() expects parameter {} by reference, but the call was "
1432 "not annotated with '&'",
1433 fpiStack.back().func->fullName(), paramId + 1);
1434 }());
1435 gen(env, RaiseWarning, cns(env, str));
1436 return;
// Unknown callee: read the Func* out of the pre-live ActRec and raise the
// mismatch at runtime.
1439 auto const actRecOff = offsetFromIRSP(env, BCSPRelOffset { off });
1440 auto const funcptr = gen(env, LdARFuncPtr, TFunc,
1441 IRSPRelOffsetData { actRecOff }, sp(env));
1442 gen(
1443 env,
1444 RaiseParamRefMismatch,
1445 ParamData { (int32_t)paramId },
1446 funcptr
// RaiseFPassWarning: emit the by-ref annotation mismatch warning for a
// known function name.  Hint must not be Any (that case never warns).
1450 void emitRaiseFPassWarning(
1451 IRGS& env, FPassHint hint, const StringData* fname, uint32_t arg
1453 if (!RuntimeOption::EvalWarnOnCallByRefAnnotationMismatch) return;
1455 assertx(hint != FPassHint::Any);
1456 auto const str = makeStaticString(
1457 formatParamRefMismatch(fname->data(), arg, hint == FPassHint::Cell)
1460 gen(env, RaiseWarning, cns(env, str));
1464 * All fpass instructions spill the stack after they execute, because we are
1465 * sure to need that value in memory, regardless of whether we side-exit or
1466 * throw. At the level of HHBC semantics, it's illegal to pop them from the
1467 * stack until we've left the FPI region, and we will be spilling the whole
1468 * stack when we get to the FCall{D,} at the end of the region. This should
1469 * also potentially reduce the number of live registers during call sequences.
1471 * Note: there is a general problem with the spillStack mechanism, in that it
1472 * may sink stores that are not profitable to sink, but in this case we can
1473 * work around it easily.
// FPassC: pass a cell; only checks the annotation against the callee's
// actual by-ref expectation (preppedByRef).
1476 void emitFPassC(IRGS& env, uint32_t argNum, FPassHint hint) {
1477 checkFPassHint(env, argNum, argNum + 1, hint,
1478 env.currentNormalizedInstruction->preppedByRef);
// FPassVNop: the value is already a ref and the callee takes it by ref; the
// only work is the annotation check.
1481 void emitFPassVNop(IRGS& env, uint32_t argNum, FPassHint hint) {
1482 checkFPassHint(env, argNum, argNum + 1, hint, true);
// FPassL: pass local `id`, as VGetL when the callee takes it by ref and
// CGetL otherwise.
1485 void emitFPassL(IRGS& env, uint32_t argNum, int32_t id, FPassHint hint) {
1486 if (env.currentNormalizedInstruction->preppedByRef) {
1487 checkFPassHint(env, argNum, argNum, hint, true);
1488 emitVGetL(env, id);
1489 } else {
1490 checkFPassHint(env, argNum, argNum, hint, false);
1491 emitCGetL(env, id);
// FPassS: pass a static property, as VGetS (by ref) or CGetS (by value)
// depending on the callee's expectation.
1495 void emitFPassS(IRGS& env, uint32_t argNum, uint32_t slot, FPassHint hint) {
1496 if (env.currentNormalizedInstruction->preppedByRef) {
1497 checkFPassHint(env, argNum, argNum + 1, hint, true);
1498 emitVGetS(env, slot);
1499 } else {
1500 checkFPassHint(env, argNum, argNum + 1, hint, false);
1501 emitCGetS(env, slot);
// FPassG: pass a global, as VGetG (by ref) or CGetG (by value) depending on
// the callee's expectation.
1505 void emitFPassG(IRGS& env, uint32_t argNum, FPassHint hint) {
1506 if (env.currentNormalizedInstruction->preppedByRef) {
1507 checkFPassHint(env, argNum, argNum + 1, hint, true);
1508 emitVGetG(env);
1509 } else {
1510 checkFPassHint(env, argNum, argNum + 1, hint, false);
1511 emitCGetG(env);
// FPassR: the value may be a cell or a ref; box it when the callee takes a
// ref, unbox it otherwise.
1515 void emitFPassR(IRGS& env, uint32_t argNum, FPassHint hint) {
1516 if (env.currentNormalizedInstruction->preppedByRef) {
1517 checkFPassHint(env, argNum, argNum + 1, hint, true);
1518 implBoxR(env);
1519 } else {
1520 checkFPassHint(env, argNum, argNum + 1, hint, false);
1521 implUnboxR(env);
// UnboxR: thin forwarder to the shared unbox implementation.
1525 void emitUnboxR(IRGS& env) { implUnboxR(env); }
// BoxR: thin forwarder to the shared box implementation.
1526 void emitBoxR(IRGS& env) { implBoxR(env); }
// FPassV: the stack value is a ref.  No-op when the callee expects a ref;
// otherwise dereference it (LdRef), push the inner cell with an incref, and
// release the ref.
1528 void emitFPassV(IRGS& env, uint32_t argNum, FPassHint hint) {
1529 if (env.currentNormalizedInstruction->preppedByRef) {
1530 // FPassV is a no-op when the callee expects by ref.
1531 checkFPassHint(env, argNum, argNum + 1, hint, true);
1532 return;
1535 checkFPassHint(env, argNum, argNum + 1, hint, false);
1536 auto const tmp = popV(env);
1537 pushIncRef(env, gen(env, LdRef, TInitCell, tmp));
1538 decRef(env, tmp);
// FPassCE: pass a cell where by-ref passing is an error.  The error path is
// punted to the interpreter rather than generated here.
1541 void emitFPassCE(IRGS& env, uint32_t argNum, FPassHint hint) {
1542 if (env.currentNormalizedInstruction->preppedByRef) {
1543 // Need to raise an error
1544 PUNT(FPassCE-byRef);
1546 checkFPassHint(env, argNum, argNum + 1, hint, false);
// FPassCW: pass a cell where by-ref passing is a warning.  The warning path
// is punted to the interpreter rather than generated here.
1549 void emitFPassCW(IRGS& env, uint32_t argNum, FPassHint hint) {
1550 if (env.currentNormalizedInstruction->preppedByRef) {
1551 // Need to raise a warning
1552 PUNT(FPassCW-byRef);
1554 checkFPassHint(env, argNum, argNum + 1, hint, false);
1557 //////////////////////////////////////////////////////////////////////
// FCallArray: invoke the pre-live ActRec with an array of arguments from
// the stack.  Local read/write effects come from the callee when it is
// statically known, otherwise from conservative call-site analysis.
1559 void emitFCallArray(IRGS& env) {
1560 auto const callee = env.currentNormalizedInstruction->funcd;
1562 auto const writeLocals = callee
1563 ? funcWritesLocals(callee)
1564 : callWritesLocals(*env.currentNormalizedInstruction, curFunc(env));
1565 auto const readLocals = callee
1566 ? funcReadsLocals(callee)
1567 : callReadsLocals(*env.currentNormalizedInstruction, curFunc(env));
// NOTE(review): two CallArrayData fields between 1570 and 1573 are elided
// in this view.
1569 auto const data = CallArrayData {
1570 spOffBCFromIRSP(env),
1573 bcOff(env),
1574 nextBcOff(env),
1575 callee,
1576 writeLocals,
1577 readLocals
1579 auto const retVal = gen(env, CallArray, data, sp(env), fp(env));
1580 push(env, retVal);
// Shared body for FCallUnpack{,M}: like FCallArray but with explicit
// parameter and out-value counts in the CallArrayData.
1583 void implFCallUnpack(IRGS& env, uint32_t numParams, uint32_t numOut) {
1584 auto const callee = env.currentNormalizedInstruction->funcd;
// Effects on locals: precise when the callee is known, conservative
// call-site analysis otherwise.
1586 auto const writeLocals = callee
1587 ? funcWritesLocals(callee)
1588 : callWritesLocals(*env.currentNormalizedInstruction, curFunc(env));
1589 auto const readLocals = callee
1590 ? funcReadsLocals(callee)
1591 : callReadsLocals(*env.currentNormalizedInstruction, curFunc(env));
1593 auto const data = CallArrayData {
1594 spOffBCFromIRSP(env),
1595 numParams,
1596 numOut,
1597 bcOff(env),
1598 nextBcOff(env),
1599 callee,
1600 writeLocals,
1601 readLocals
1603 auto const retVal = gen(env, CallArray, data, sp(env), fp(env));
1604 push(env, retVal);
// FCallUnpack: no out-values.
1607 void emitFCallUnpack(IRGS& env, uint32_t numParams) {
1608 implFCallUnpack(env, numParams, 0);
// FCallUnpackM: FCallUnpack with `numOut` return values.  Falls back to the
// interpreter unless CallM generation is enabled.  numOut counts the main
// return plus out-params, hence the - 1 (matches emitFCallM below).
1611 void emitFCallUnpackM(IRGS& env, uint32_t numParams, uint32_t numOut) {
1612 if (!RuntimeOption::EvalHHIRGenerateCallM) {
1613 interpOne(env, *env.currentNormalizedInstruction);
1614 return;
1616 implFCallUnpack(env, numParams, numOut - 1);
// Shared body for the FCall family: emit the Call instruction against the
// pre-live ActRec and push the return value.  Locals/frame effects are
// computed precisely from a known callee or conservatively from the call
// site.  The FCallAwait flag is derived from the actual opcode at bcOff.
1619 SSATmp* implFCall(IRGS& env, uint32_t numParams, uint32_t numOut) {
1620 auto const returnBcOffset = nextBcOff(env) - curFunc(env)->base();
1621 auto const callee = env.currentNormalizedInstruction->funcd;
1623 auto const writeLocals = callee
1624 ? funcWritesLocals(callee)
1625 : callWritesLocals(*env.currentNormalizedInstruction, curFunc(env));
1626 auto const readLocals = callee
1627 ? funcReadsLocals(callee)
1628 : callReadsLocals(*env.currentNormalizedInstruction, curFunc(env));
1629 auto const needsCallerFrame = callee
1630 ? funcNeedsCallerFrame(callee)
1631 : callNeedsCallerFrame(
1632 *env.currentNormalizedInstruction,
1633 curFunc(env)
// Several FCall variants funnel through here; check the concrete opcode to
// detect FCallAwait.
1636 auto op = curFunc(env)->unit()->getOp(bcOff(env));
1637 auto const retVal = gen(
1638 env,
1639 Call,
1640 CallData {
1641 spOffBCFromIRSP(env),
1642 static_cast<uint32_t>(numParams),
1643 numOut,
1644 returnBcOffset,
1645 callee,
1646 writeLocals,
1647 readLocals,
1648 needsCallerFrame,
1649 op == Op::FCallAwait
1651 sp(env),
1652 fp(env)
1655 push(env, retVal);
1656 return retVal;
// FCall: no out-values.
1659 void emitFCall(IRGS& env, uint32_t numParams) {
1660 implFCall(env, numParams, 0);
// FCallD: the class/function name immediates are only used by earlier
// pipeline stages; codegen is identical to FCall.
1663 void emitFCallD(IRGS& env,
1664 uint32_t numParams,
1665 const StringData*,
1666 const StringData*) {
1667 implFCall(env, numParams, 0);
// FCallM: FCall with `numOut` return values.  Falls back to the interpreter
// unless CallM generation is enabled.  numOut counts the main return plus
// out-params, hence the - 1.
1670 void emitFCallM(IRGS& env, uint32_t numParams, uint32_t numOut) {
1671 if (!RuntimeOption::EvalHHIRGenerateCallM) {
1672 interpOne(env, *env.currentNormalizedInstruction);
1673 return;
1675 implFCall(env, numParams, numOut - 1);
// FCallDM: FCallD with multiple return values; same interpreter fallback
// and numOut adjustment as FCallM.
1678 void emitFCallDM(IRGS& env,
1679 uint32_t numParams,
1680 uint32_t numOut,
1681 const StringData*,
1682 const StringData*) {
1683 if (!RuntimeOption::EvalHHIRGenerateCallM) {
1684 interpOne(env, *env.currentNormalizedInstruction);
1685 return;
1687 implFCall(env, numParams, numOut - 1);
// Emit a complete call to a statically known callee with already-computed
// SSA arguments: synthesizes the FPushFuncD-style ActRec, pushes the args,
// and emits the Call — the whole FPI region in one helper.
1690 void emitDirectCall(IRGS& env, Func* callee, uint32_t numParams,
1691 SSATmp* const* const args) {
1692 auto const returnBcOffset = nextBcOff(env) - curFunc(env)->base();
// Make the frame-state treat this ActRec as if FPushFuncD produced it.
1694 env.irb->fs().setFPushOverride(Op::FPushFuncD);
1695 fpushActRec(
1696 env,
1697 cns(env, callee),
1698 cns(env, TNullptr),
1699 numParams,
1700 nullptr,
1701 cns(env, false)
// fpushActRec must have consumed the override.
1703 assertx(!env.irb->fs().hasFPushOverride());
1705 for (int32_t i = 0; i < numParams; i++) {
1706 push(env, args[i]);
1708 updateMarker(env);
1709 env.irb->exceptionStackBoundary();
1711 auto const retVal = gen(
1712 env,
1713 Call,
1714 CallData {
1715 spOffBCFromIRSP(env),
1716 static_cast<uint32_t>(numParams),
1718 returnBcOffset,
1719 callee,
1720 funcWritesLocals(callee),
1721 funcReadsLocals(callee),
1722 funcNeedsCallerFrame(callee),
1723 false
1725 sp(env),
1726 fp(env)
1729 push(env, retVal);
1732 //////////////////////////////////////////////////////////////////////
// Compute the JIT's best static type for a call to `callee`:
//  - interceptable/renameable functions can return anything (TInitGen);
//  - C++ builtins use the builtin's declared return type, widened by the
//    values produced on parameter-coercion failure;
//  - otherwise the repo-authoritative (HHBBC) return type.
1734 Type callReturnType(const Func* callee) {
1735 // Don't make any assumptions about functions which can be intercepted. The
1736 // interception functions can return arbitrary types.
1737 if (RuntimeOption::EvalJitEnableRenameFunction ||
1738 callee->attrs() & AttrInterceptable) {
1739 return TInitGen;
1742 if (callee->isCPPBuiltin()) {
1743 // If the function is builtin, use the builtin's return type, then take into
1744 // account coercion failures.
1745 auto type = builtinReturnType(callee);
// Coercion-failure modes add null or false to the possible results.
1746 if (callee->attrs() & AttrParamCoerceModeNull) type |= TInitNull;
1747 if (callee->attrs() & AttrParamCoerceModeFalse) type |= Type::cns(false);
1748 return type;
1751 // Otherwise use HHBBC's analysis if present
1752 return typeFromRAT(callee->repoReturnType(), callee->cls());
1755 //////////////////////////////////////////////////////////////////////