Split isCallToExit() from isReturnHelper()
[hiphop-php.git] / hphp / runtime / vm / bytecode.cpp
blob689692fc6ffdda54e4f403f8b12196b44fc85912
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
18 #include "hphp/runtime/vm/bytecode.h"
20 #include <algorithm>
21 #include <string>
22 #include <vector>
23 #include <sstream>
24 #include <iostream>
25 #include <iomanip>
26 #include <cinttypes>
28 #include <boost/filesystem.hpp>
30 #include <folly/String.h>
31 #include <folly/portability/SysMman.h>
33 #include "hphp/util/numa.h"
34 #include "hphp/util/portability.h"
35 #include "hphp/util/ringbuffer.h"
36 #include "hphp/util/text-util.h"
37 #include "hphp/util/trace.h"
39 #include "hphp/system/systemlib.h"
41 #include "hphp/runtime/base/apc-stats.h"
42 #include "hphp/runtime/base/apc-typed-value.h"
43 #include "hphp/runtime/base/array-init.h"
44 #include "hphp/runtime/base/array-iterator.h"
45 #include "hphp/runtime/base/array-provenance.h"
46 #include "hphp/runtime/base/bespoke-array.h"
47 #include "hphp/runtime/base/code-coverage.h"
48 #include "hphp/runtime/base/collections.h"
49 #include "hphp/runtime/base/container-functions.h"
50 #include "hphp/runtime/base/enum-util.h"
51 #include "hphp/runtime/base/execution-context.h"
52 #include "hphp/runtime/base/file-util.h"
53 #include "hphp/runtime/base/hhprof.h"
54 #include "hphp/runtime/base/implicit-context.h"
55 #include "hphp/runtime/base/memory-manager.h"
56 #include "hphp/runtime/base/object-data.h"
57 #include "hphp/runtime/base/program-functions.h"
58 #include "hphp/runtime/base/rds.h"
59 #include "hphp/runtime/base/repo-auth-type-codec.h"
60 #include "hphp/runtime/base/runtime-error.h"
61 #include "hphp/runtime/base/runtime-option.h"
62 #include "hphp/runtime/base/stat-cache.h"
63 #include "hphp/runtime/base/stats.h"
64 #include "hphp/runtime/base/strings.h"
65 #include "hphp/runtime/base/tv-arith.h"
66 #include "hphp/runtime/base/tv-comparisons.h"
67 #include "hphp/runtime/base/tv-conversions.h"
68 #include "hphp/runtime/base/tv-refcount.h"
69 #include "hphp/runtime/base/tv-type.h"
70 #include "hphp/runtime/base/type-structure.h"
71 #include "hphp/runtime/base/type-structure-helpers-defs.h"
72 #include "hphp/runtime/base/type-structure-helpers.h"
73 #include "hphp/runtime/base/type-variant.h"
74 #include "hphp/runtime/base/unit-cache.h"
75 #include "hphp/runtime/base/vanilla-dict.h"
76 #include "hphp/runtime/base/vanilla-keyset.h"
78 #include "hphp/runtime/ext/array/ext_array.h"
79 #include "hphp/runtime/ext/asio/ext_await-all-wait-handle.h"
80 #include "hphp/runtime/ext/asio/ext_async-function-wait-handle.h"
81 #include "hphp/runtime/ext/asio/ext_async-generator-wait-handle.h"
82 #include "hphp/runtime/ext/asio/ext_async-generator.h"
83 #include "hphp/runtime/ext/asio/ext_static-wait-handle.h"
84 #include "hphp/runtime/ext/asio/ext_wait-handle.h"
85 #include "hphp/runtime/ext/asio/ext_waitable-wait-handle.h"
86 #include "hphp/runtime/ext/std/ext_std_closure.h"
87 #include "hphp/runtime/ext/extension.h"
88 #include "hphp/runtime/ext/generator/ext_generator.h"
89 #include "hphp/runtime/ext/hh/ext_hh.h"
90 #include "hphp/runtime/ext/reflection/ext_reflection.h"
91 #include "hphp/runtime/ext/std/ext_std_variable.h"
92 #include "hphp/runtime/ext/string/ext_string.h"
93 #include "hphp/runtime/ext/json/JSON_parser.h"
95 #include "hphp/runtime/server/rpc-request-handler.h"
96 #include "hphp/runtime/server/source-root-info.h"
98 #include "hphp/runtime/vm/act-rec-defs.h"
99 #include "hphp/runtime/vm/act-rec.h"
100 #include "hphp/runtime/vm/class.h"
101 #include "hphp/runtime/vm/class-meth-data-ref.h"
102 #include "hphp/runtime/vm/cti.h"
103 #include "hphp/runtime/vm/debug/debug.h"
104 #include "hphp/runtime/vm/debugger-hook.h"
105 #include "hphp/runtime/vm/event-hook.h"
106 #include "hphp/runtime/ext/functioncredential/ext_functioncredential.h"
107 #include "hphp/runtime/vm/hh-utils.h"
108 #include "hphp/runtime/vm/hhbc-codec.h"
109 #include "hphp/runtime/vm/hhbc.h"
110 #include "hphp/runtime/vm/interp-helpers.h"
111 #include "hphp/runtime/vm/iter.h"
112 #include "hphp/runtime/vm/member-operations.h"
113 #include "hphp/runtime/vm/memo-cache.h"
114 #include "hphp/runtime/vm/method-lookup.h"
115 #include "hphp/runtime/vm/native.h"
116 #include "hphp/runtime/vm/reified-generics.h"
117 #include "hphp/runtime/vm/repo-global-data.h"
118 #include "hphp/runtime/vm/resumable.h"
119 #include "hphp/runtime/vm/runtime.h"
120 #include "hphp/runtime/vm/srckey.h"
121 #include "hphp/runtime/vm/taint/interpreter.h"
122 #include "hphp/runtime/vm/type-constraint.h"
123 #include "hphp/runtime/vm/type-profile.h"
124 #include "hphp/runtime/vm/unwind.h"
125 #include "hphp/runtime/vm/workload-stats.h"
127 #include "hphp/runtime/vm/jit/code-cache.h"
128 #include "hphp/runtime/vm/jit/enter-tc.h"
129 #include "hphp/runtime/vm/jit/perf-counters.h"
130 #include "hphp/runtime/vm/jit/service-request-handlers.h"
131 #include "hphp/runtime/vm/jit/tc.h"
132 #include "hphp/runtime/vm/jit/translator-inline.h"
133 #include "hphp/runtime/vm/jit/translator-runtime.h"
134 #include "hphp/runtime/vm/jit/translator.h"
135 #include "hphp/runtime/vm/jit/unwind-itanium.h"
137 #include "hphp/util/stacktrace-profiler.h"
140 namespace HPHP {
142 TRACE_SET_MOD(bcinterp);
144 // RepoAuthoritative has been raptured out of runtime_option.cpp. It needs
145 // to be closer to other bytecode.cpp data.
146 bool RuntimeOption::RepoAuthoritative = false;
148 using jit::TCA;
150 // GCC 4.8 has some real problems with all the inlining in this file, so don't
151 // go overboard with that version.
152 #if !defined(NDEBUG) || ((__GNUC__ == 4) && (__GNUC_MINOR__ == 8))
153 #define OPTBLD_INLINE
154 #else
155 #define OPTBLD_INLINE ALWAYS_INLINE
156 #endif
158 Class* arGetContextClass(const ActRec* ar) {
159 if (ar == nullptr) {
160 return nullptr;
162 return ar->func()->cls();
165 void frame_free_locals_no_hook(ActRec* fp) {
166 frame_free_locals_inl_no_hook(fp, fp->func()->numLocals());
169 const StaticString s_file("file");
170 const StaticString s_line("line");
///////////////////////////////////////////////////////////////////////////////

//=============================================================================
// Miscellaneous decoders.

// Human-readable names for immediate types, used only in trace output.
inline const char* prettytype(int) { return "int"; }
inline const char* prettytype(long) { return "long"; }
inline const char* prettytype(long long) { return "long long"; }
inline const char* prettytype(double) { return "double"; }
inline const char* prettytype(unsigned) { return "unsigned"; }
182 inline const char* prettytype(OODeclExistsOp) { return "OpDeclExistsOp"; }
183 inline const char* prettytype(FatalOp) { return "FatalOp"; }
184 inline const char* prettytype(IsTypeOp) { return "IsTypeOp"; }
185 inline const char* prettytype(SetOpOp) { return "SetOpOp"; }
186 inline const char* prettytype(IncDecOp) { return "IncDecOp"; }
187 inline const char* prettytype(ObjMethodOp) { return "ObjMethodOp"; }
188 inline const char* prettytype(BareThisOp) { return "BareThisOp"; }
189 inline const char* prettytype(InitPropOp) { return "InitPropOp"; }
190 inline const char* prettytype(SilenceOp) { return "SilenceOp"; }
191 inline const char* prettytype(SwitchKind) { return "SwitchKind"; }
192 inline const char* prettytype(MOpMode) { return "MOpMode"; }
193 inline const char* prettytype(QueryMOp) { return "QueryMOp"; }
194 inline const char* prettytype(SetRangeOp) { return "SetRangeOp"; }
195 inline const char* prettytype(TypeStructResolveOp) {
196 return "TypeStructResolveOp";
198 inline const char* prettytype(ReadonlyOp) { return "ReadonlyOp"; }
199 inline const char* prettytype(CudOp) { return "CudOp"; }
200 inline const char* prettytype(ContCheckOp) { return "ContCheckOp"; }
201 inline const char* prettytype(SpecialClsRef) { return "SpecialClsRef"; }
202 inline const char* prettytype(CollectionType) { return "CollectionType"; }
203 inline const char* prettytype(IsLogAsDynamicCallOp) {
204 return "IsLogAsDynamicCallOp";
207 // load a T value from *pc without incrementing
208 template<class T> T peek(PC pc) {
209 T v;
210 std::memcpy(&v, pc, sizeof v);
211 TRACE(2, "decode: Immediate %s %" PRIi64"\n", prettytype(v), int64_t(v));
212 return v;
215 template<class T> T decode(PC& pc) {
216 auto v = peek<T>(pc);
217 pc += sizeof(T);
218 return v;
221 inline const ArrayData* decode_litarr(PC& pc) {
222 return liveUnit()->lookupArrayId(decode<Id>(pc));
225 ALWAYS_INLINE TypedValue* decode_local(PC& pc) {
226 auto la = decode_iva(pc);
227 assertx(la < vmfp()->func()->numLocals());
228 return frame_local(vmfp(), la);
231 ALWAYS_INLINE local_var decode_indexed_local(PC& pc) {
232 auto la = decode_iva(pc);
233 assertx(la < vmfp()->func()->numLocals());
234 return local_var{frame_local(vmfp(), la), safe_cast<int32_t>(la)};
237 ALWAYS_INLINE named_local_var decode_named_local_var(PC& pc) {
238 auto loc = decode_named_local(pc);
239 assertx(0 <= loc.id);
240 assertx(loc.id < vmfp()->func()->numLocals());
241 assertx(kInvalidLocalName <= loc.name);
242 assertx(loc.name < vmfp()->func()->numNamedLocals());
243 return named_local_var{loc.name, frame_local(vmfp(), loc.id)};
246 ALWAYS_INLINE Iter* decode_iter(PC& pc) {
247 auto ia = decode_iva(pc);
248 return frame_iter(vmfp(), ia);
251 template<typename T>
252 OPTBLD_INLINE imm_array<T> decode_imm_array(PC& pc) {
253 auto const size = decode_iva(pc);
254 auto const arr_pc = pc;
255 pc += size * sizeof(T);
256 return imm_array<T>{size, arr_pc};
259 OPTBLD_INLINE RepoAuthType decode_rat(PC& pc) {
260 if (debug) return decodeRAT(liveUnit(), pc);
262 pc += encodedRATSize(pc);
263 return RepoAuthType{};
266 //=============================================================================
267 // Miscellaneous helpers.
269 static inline Class* frameStaticClass(ActRec* fp) {
270 if (!fp->func()->cls()) return nullptr;
271 if (fp->hasThis()) {
272 return fp->getThis()->getVMClass();
274 return fp->getClass();
277 //=============================================================================
278 // VarEnv.
280 namespace {
281 const StaticString
282 s_argc("argc"),
283 s_argv("argv"),
284 s__SERVER("_SERVER"),
285 s__GET("_GET"),
286 s__POST("_POST"),
287 s__COOKIE("_COOKIE"),
288 s__FILES("_FILES"),
289 s__ENV("_ENV"),
290 s__REQUEST("_REQUEST"),
291 s_HTTP_RAW_POST_DATA("HTTP_RAW_POST_DATA");
294 void createGlobalNVTable() {
295 assertx(!g_context->m_globalNVTable);
296 g_context->m_globalNVTable = req::make_raw<NameValueTable>();
297 auto nvTable = g_context->m_globalNVTable;
298 Variant arr(ArrayData::CreateDict());
299 nvTable->set(s_argc.get(), init_null_variant.asTypedValue());
300 nvTable->set(s_argv.get(), init_null_variant.asTypedValue());
301 nvTable->set(s__SERVER.get(), arr.asTypedValue());
302 nvTable->set(s__GET.get(), arr.asTypedValue());
303 nvTable->set(s__POST.get(), arr.asTypedValue());
304 nvTable->set(s__COOKIE.get(), arr.asTypedValue());
305 nvTable->set(s__FILES.get(), arr.asTypedValue());
306 nvTable->set(s__ENV.get(), arr.asTypedValue());
307 nvTable->set(s__REQUEST.get(), arr.asTypedValue());
308 nvTable->set(s_HTTP_RAW_POST_DATA.get(), init_null_variant.asTypedValue());
311 const StaticString s_reified_generics_var("0ReifiedGenerics");
312 const StaticString s_coeffects_var("0Coeffects");
314 Array getDefinedVariables() {
315 Array ret = Array::CreateDict();
317 NameValueTable::Iterator iter(g_context->m_globalNVTable);
318 for (; iter.valid(); iter.next()) {
319 auto const sd = iter.curKey();
320 auto const val = iter.curVal();
321 // Reified functions and functions with coeffects rules
322 // have an internal variables
323 if (s_reified_generics_var.equal(sd) || s_coeffects_var.equal(sd)) {
324 continue;
326 ret.set(StrNR(sd).asString(), Variant{const_variant_ref{val}});
329 // Make result independent of the hashtable implementation.
330 ArrayData* sorted = ret->escalateForSort(SORTFUNC_KSORT);
331 assertx(sorted == ret.get() ||
332 sorted->empty() ||
333 sorted->hasExactlyOneRef());
334 SCOPE_EXIT {
335 if (sorted != ret.get()) {
336 ret = Array::attach(sorted);
339 sorted->ksort(0, true);
341 return ret;
344 //=============================================================================
345 // Stack.
347 // Store actual stack elements array in a thread-local in order to amortize the
348 // cost of allocation.
349 struct StackElms {
350 ~StackElms() { free(m_elms); }
351 TypedValue* elms() {
352 if (m_elms == nullptr) {
353 // RuntimeOption::EvalVMStackElms-sized and -aligned.
354 size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
355 if (posix_memalign((void**)&m_elms, algnSz, algnSz) != 0) {
356 throw std::runtime_error(
357 std::string("VM stack initialization failed: ") +
358 folly::errnoStr(errno).c_str());
360 madvise(m_elms, algnSz, MADV_DONTNEED);
362 return m_elms;
364 void flush() {
365 if (m_elms != nullptr) {
366 size_t algnSz = RuntimeOption::EvalVMStackElms * sizeof(TypedValue);
367 madvise(m_elms, algnSz, MADV_DONTNEED);
370 private:
371 TypedValue* m_elms{nullptr};
373 THREAD_LOCAL_FLAT(StackElms, t_se);
375 const int Stack::sSurprisePageSize = sysconf(_SC_PAGESIZE);
376 // We reserve the bottom page of each stack for use as the surprise
377 // page, so the minimum useful stack size is the next power of two.
378 const uint32_t Stack::sMinStackElms =
379 2 * sSurprisePageSize / sizeof(TypedValue);
381 void Stack::ValidateStackSize() {
382 if (RuntimeOption::EvalVMStackElms < sMinStackElms) {
383 throw std::runtime_error(folly::sformat(
384 "VM stack size of {:#x} is below the minimum of {:#x}",
385 RuntimeOption::EvalVMStackElms,
386 sMinStackElms
389 if (!folly::isPowTwo(RuntimeOption::EvalVMStackElms)) {
390 throw std::runtime_error(folly::sformat(
391 "VM stack size of {:#x} is not a power of 2",
392 RuntimeOption::EvalVMStackElms
397 Stack::Stack()
398 : m_elms(nullptr), m_top(nullptr), m_base(nullptr) {
401 Stack::~Stack() {
402 requestExit();
405 void Stack::requestInit() {
406 m_elms = t_se->elms();
407 // Burn one element of the stack, to satisfy the constraint that
408 // valid m_top values always have the same high-order (>
409 // log(RuntimeOption::EvalVMStackElms)) bits.
410 m_top = m_base = m_elms + RuntimeOption::EvalVMStackElms - 1;
412 rds::header()->stackLimitAndSurprise.store(
413 reinterpret_cast<uintptr_t>(
414 reinterpret_cast<char*>(m_elms) + sSurprisePageSize +
415 kStackCheckPadding * sizeof(TypedValue)
417 std::memory_order_release
419 assertx(!(rds::header()->stackLimitAndSurprise.load() & kSurpriseFlagMask));
421 // Because of the surprise page at the bottom of the stack we lose an
422 // additional 256 elements which must be taken into account when checking for
423 // overflow.
424 UNUSED size_t maxelms =
425 RuntimeOption::EvalVMStackElms - sSurprisePageSize / sizeof(TypedValue);
426 assertx(!wouldOverflow(maxelms - 1));
427 assertx(wouldOverflow(maxelms));
430 void Stack::requestExit() {
431 m_elms = nullptr;
434 void flush_evaluation_stack() {
435 if (vmStack().isAllocated()) {
436 // For RPCRequestHandler threads, the ExecutionContext can stay
437 // alive across requests, but its always ok to kill it between
438 // requests, so do so now
439 RPCRequestHandler::cleanupState();
442 tl_heap->flush();
444 if (!t_se.isNull()) {
445 t_se->flush();
447 rds::flush();
448 json_parser_flush_caches();
450 always_assert(tl_heap->empty());
453 static std::string toStringElm(TypedValue tv) {
454 std::ostringstream os;
456 if (!isRealType(tv.m_type)) {
457 os << " ??? type " << static_cast<data_type_t>(tv.m_type) << "\n";
458 return os.str();
460 if (isRefcountedType(tv.m_type) &&
461 !tv.m_data.pcnt->checkCount()) {
462 // OK in the invoking frame when running a destructor.
463 os << " ??? inner_count " << tvGetCount(tv) << " ";
464 return os.str();
467 auto print_count = [&] {
468 if (tv.m_data.pcnt->isStatic()) {
469 os << ":c(static)";
470 } else if (tv.m_data.pcnt->isUncounted()) {
471 os << ":c(uncounted)";
472 } else {
473 os << ":c(" << tvGetCount(tv) << ")";
477 os << "C:";
479 do {
480 switch (tv.m_type) {
481 case KindOfUninit:
482 os << "Uninit";
483 continue;
484 case KindOfNull:
485 os << "Null";
486 continue;
487 case KindOfBoolean:
488 os << (tv.m_data.num ? "True" : "False");
489 continue;
490 case KindOfInt64:
491 os << "0x" << std::hex << tv.m_data.num << std::dec;
492 continue;
493 case KindOfDouble:
494 os << tv.m_data.dbl;
495 continue;
496 case KindOfPersistentString:
497 case KindOfString:
499 int len = tv.m_data.pstr->size();
500 bool truncated = false;
501 if (len > 128) {
502 len = 128;
503 truncated = true;
505 os << tv.m_data.pstr;
506 print_count();
507 os << ":\""
508 << escapeStringForCPP(tv.m_data.pstr->data(), len)
509 << "\"" << (truncated ? "..." : "");
511 continue;
512 case KindOfPersistentVec:
513 case KindOfVec:
514 assertx(tv.m_data.parr->isVecType());
515 assertx(tv.m_data.parr->checkCount());
516 os << tv.m_data.parr;
517 print_count();
518 os << ":Vec";
519 continue;
520 case KindOfPersistentDict:
521 case KindOfDict:
522 assertx(tv.m_data.parr->isDictType());
523 assertx(tv.m_data.parr->checkCount());
524 os << tv.m_data.parr;
525 print_count();
526 os << ":Dict";
527 continue;
528 case KindOfPersistentKeyset:
529 case KindOfKeyset:
530 assertx(tv.m_data.parr->isKeysetType());
531 assertx(tv.m_data.parr->checkCount());
532 os << tv.m_data.parr;
533 print_count();
534 os << ":Keyset";
535 continue;
536 case KindOfObject:
537 assertx(tv.m_data.pobj->checkCount());
538 os << tv.m_data.pobj;
539 print_count();
540 os << ":Object("
541 << tv.m_data.pobj->getClassName().get()->data()
542 << ")";
543 continue;
544 case KindOfResource:
545 assertx(tv.m_data.pres->checkCount());
546 os << tv.m_data.pres;
547 print_count();
548 os << ":Resource("
549 << tv.m_data.pres->data()->o_getClassName().get()->data()
550 << ")";
551 continue;
552 case KindOfRFunc: // TODO(T63348446) serialize the reified generics
553 assertx(tv.m_data.prfunc->checkCount());
554 os << tv.m_data.prfunc;
555 print_count();
556 os << ":RFunc("
557 << tv.m_data.prfunc->m_func->fullName()->data()
558 << ")<"
559 << tv.m_data.prfunc->m_arr
560 << ">";
561 continue;
562 case KindOfFunc:
563 os << ":Func("
564 << tv.m_data.pfunc->fullName()->data()
565 << ")";
566 continue;
567 case KindOfClass:
568 os << ":Class("
569 << tv.m_data.pclass->name()->data()
570 << ")";
571 continue;
572 case KindOfLazyClass:
573 os << ":LClass("
574 << tv.m_data.plazyclass.name()->data()
575 << ")";
576 continue;
577 case KindOfClsMeth:
578 os << ":ClsMeth("
579 << tv.m_data.pclsmeth->getCls()->name()->data()
580 << ", "
581 << tv.m_data.pclsmeth->getFunc()->fullName()->data()
582 << ")";
583 continue;
584 case KindOfRClsMeth:
585 os << ":RClsMeth("
586 << tv.m_data.prclsmeth->m_cls->name()->data()
587 << ", "
588 << tv.m_data.prclsmeth->m_func->fullName()->data()
589 << ")<"
590 << tv.m_data.prclsmeth->m_arr
591 << ">";
592 continue;
594 not_reached();
595 } while (0);
597 return os.str();
601 * Return true if Offset o is inside the protected region of a fault
602 * funclet for iterId, otherwise false.
604 static bool checkIterScope(const Func* f, Offset o, Id iterId) {
605 assertx(o >= 0 && o < f->bclen());
606 for (auto const& eh : f->ehtab()) {
607 if (eh.m_base <= o && o < eh.m_past &&
608 eh.m_iterId == iterId) {
609 return true;
612 return false;
615 static void toStringFrame(std::ostream& os, const ActRec* fp,
616 int offset, const TypedValue* ftop,
617 const std::string& prefix, bool isTop = true) {
618 assertx(fp);
620 // Use depth-first recursion to output the most deeply nested stack frame
621 // first.
623 Offset prevPc = 0;
624 TypedValue* prevStackTop = nullptr;
625 ActRec* prevFp = g_context->getPrevVMState(fp, &prevPc, &prevStackTop);
626 if (prevFp != nullptr) {
627 toStringFrame(os, prevFp, prevPc, prevStackTop, prefix, false);
631 os << prefix;
632 const Func* func = fp->func();
633 assertx(func);
634 func->validate();
635 std::string funcName(func->fullName()->data());
636 os << "{func:" << funcName
637 << ",callOff:" << fp->callOffset()
638 << ",this:0x"
639 << std::hex << (func->cls() && fp->hasThis() ? fp->getThis() : nullptr)
640 << std::dec << "}";
642 if (func->numLocals() > 0) {
643 // Don't print locals for parent frames on a Ret(C|V) since some of them
644 // may already be destructed.
645 if (isRet(func->getOp(offset)) && !isTop) {
646 os << "<locals destroyed>";
647 } else {
648 os << "<";
649 int n = func->numLocals();
650 for (int i = 0; i < n; i++) {
651 if (i > 0) {
652 os << " ";
654 os << toStringElm(*frame_local(fp, i));
656 os << ">";
660 if (func->numIterators() > 0) {
661 os << "|";
662 for (int i = 0; i < func->numIterators(); i++) {
663 if (i > 0) {
664 os << " ";
666 if (checkIterScope(func, offset, i)) {
667 os << frame_iter(fp, i)->toString();
668 } else {
669 os << "I:Undefined";
672 os << "|";
675 std::vector<std::string> stackElems;
676 visitStackElems(
677 fp, ftop,
678 [&](const TypedValue* tv) {
679 stackElems.push_back(toStringElm(*tv));
682 std::reverse(stackElems.begin(), stackElems.end());
683 os << ' ' << folly::join(' ', stackElems);
685 os << '\n';
688 std::string Stack::toString(const ActRec* fp, int offset,
689 const std::string prefix/* = "" */) const {
690 // The only way to figure out which stack elements are activation records is
691 // to follow the frame chain. However, the goal for each stack frame is to
692 // print stack fragments from deepest to shallowest -- a then b in the
693 // following example:
695 // {func:foo,callOff:51}<C:8> {func:bar} C:8 C:1 {func:biz} C:0
696 // aaaaaaaaaaaaaaaaaa bbbbbbbbbbbbbb
698 // Use depth-first recursion to get the output order correct.
700 std::ostringstream os;
701 auto unit = fp->unit();
702 auto func = fp->func();
703 os << prefix << "=== Stack at "
704 << unit->filepath()->data() << ":"
705 << func->getLineNumber(func->offsetOf(vmpc()))
706 << " func " << func->fullName()->data() << " ===\n";
708 toStringFrame(os, fp, offset, m_top, prefix);
710 return os.str();
713 bool Stack::wouldOverflow(int numCells) const {
714 // The funny approach here is to validate the translator's assembly
715 // technique. We've aligned and sized the stack so that the high order
716 // bits of valid cells are all the same. In the translator, numCells
717 // can be hardcoded, and m_top is wired into a register,
718 // so the expression requires no loads.
719 intptr_t truncatedTop = intptr_t(m_top) / sizeof(TypedValue);
720 truncatedTop &= RuntimeOption::EvalVMStackElms - 1;
721 intptr_t diff = truncatedTop - numCells -
722 sSurprisePageSize / sizeof(TypedValue);
723 return diff < 0;
726 TypedValue* Stack::anyFrameStackBase(const ActRec* fp) {
727 return isResumed(fp) ? Stack::resumableStackBase(fp)
728 : Stack::frameStackBase(fp);
731 TypedValue* Stack::frameStackBase(const ActRec* fp) {
732 assertx(!isResumed(fp));
733 return (TypedValue*)fp - fp->func()->numSlotsInFrame();
736 TypedValue* Stack::resumableStackBase(const ActRec* fp) {
737 assertx(isResumed(fp));
738 auto sfp = fp->sfp();
739 if (sfp) {
740 // The non-reentrant case occurs when a non-async or async generator is
741 // resumed via ContEnter or ContRaise opcode. These opcodes leave a single
742 // value on the stack that becomes part of the generator's stack. So we
743 // find the caller's FP, compensate for its locals and iterators, and then
744 // we've found the base of the generator's stack.
745 assertx(fp->func()->isGenerator());
746 assertx(!sfp->func()->isGenerator());
747 return (TypedValue*)sfp - sfp->func()->numSlotsInFrame();
748 } else {
749 // The reentrant case occurs when asio scheduler resumes an async function
750 // or async generator. We simply use the top of stack of the previous VM
751 // frame (since the ActRec, locals, and iters for this frame do not reside
752 // on the VM stack).
753 assertx(fp->func()->isAsync());
754 TypedValue* prevSp;
755 DEBUG_ONLY auto const prevFp =
756 g_context.getNoCheck()->getPrevVMState(fp, nullptr, &prevSp);
757 assertx(prevFp != nullptr);
758 return prevSp;
762 Array getDefinedVariables(const ActRec* fp) {
763 if (UNLIKELY(fp == nullptr || fp->isInlined())) return empty_dict_array();
764 auto const func = fp->func();
765 auto const numLocals = func->numNamedLocals();
766 DictInit ret(numLocals);
767 for (Id id = 0; id < numLocals; ++id) {
768 auto const local = frame_local(fp, id);
769 if (type(local) == KindOfUninit) {
770 continue;
772 auto const localNameSd = func->localVarName(id);
773 if (!localNameSd) continue;
774 // this is basically just a convoluted const_cast :p
775 Variant name(localNameSd, Variant::PersistentStrInit{});
776 ret.set(name.getStringData(), Variant{variant_ref{local}});
778 return ret.toArray();
781 // Unpack or repack arguments as needed to match the function signature.
782 // The stack contains numArgs arguments plus an extra cell containing
783 // arguments to unpack.
784 uint32_t prepareUnpackArgs(const Func* func, uint32_t numArgs,
785 bool checkInOutAnnot) {
786 auto& stack = vmStack();
787 auto unpackArgs = *stack.topC();
788 if (!isContainer(unpackArgs)) throwInvalidUnpackArgs();
789 stack.discard();
790 SCOPE_EXIT { tvDecRefGen(unpackArgs); };
792 auto const numUnpackArgs = getContainerSize(unpackArgs);
793 auto const numParams = func->numNonVariadicParams();
794 if (LIKELY(numArgs == numParams)) {
795 // Convert unpack args to the proper type.
796 tvCastToVecInPlace(&unpackArgs);
797 stack.pushVec(unpackArgs.m_data.parr);
798 return numParams + 1;
801 ArrayIter iter(unpackArgs);
802 if (LIKELY(numArgs < numParams)) {
803 for (auto i = numArgs; iter && (i < numParams); ++i, ++iter) {
804 if (UNLIKELY(checkInOutAnnot && func->isInOut(i))) {
805 throwParamInOutMismatch(func, i);
807 auto const from = iter.secondValPlus();
808 tvDup(from, *stack.allocTV());
811 if (LIKELY(!iter)) {
812 // argArray was exhausted, so there are no "extra" arguments but there
813 // may be a deficit of non-variadic arguments, and the need to push an
814 // empty array for the variadic argument ... that work is left to
815 // prepareFuncEntry.
816 assertx(numArgs + numUnpackArgs <= numParams);
817 return numArgs + numUnpackArgs;
821 // there are "extra" arguments; passed as standard arguments prior to the
822 // ... unpack operator and/or still remaining in argArray
823 assertx(numArgs + numUnpackArgs > numParams);
824 assertx(numArgs > numParams || !!iter);
826 auto const numNewUnpackArgs = numArgs + numUnpackArgs - numParams;
827 VecInit ai(numNewUnpackArgs);
828 if (UNLIKELY(numArgs > numParams)) {
829 // The arguments are pushed in order, so we should start from the bottom.
830 auto ptr = stack.indTV(numArgs - numParams);
831 for (auto i = numParams; i < numArgs; ++i) {
832 ai.append(*--ptr);
834 for (auto i = numParams; i < numArgs; ++i) {
835 stack.popTV();
838 for (; iter; ++iter) {
839 ai.append(iter.secondValPlus());
841 auto const ad = ai.create();
842 assertx(ad->hasExactlyOneRef());
843 assertx(ad->size() == numNewUnpackArgs);
844 stack.pushArrayLikeNoRc(ad);
845 return numParams + 1;
848 static void prepareFuncEntry(ActRec *ar, uint32_t numArgsInclUnpack) {
849 assertx(!isResumed(ar));
850 assertx(
851 reinterpret_cast<TypedValue*>(ar) - vmStack().top() ==
852 ar->func()->numParams()
853 + (ar->func()->hasReifiedGenerics() ? 1U : 0U)
854 + (ar->func()->hasCoeffectsLocal() ? 1U : 0U)
857 // +- Order Of Stack-------+
858 // | arguments |
859 // | reified generics |
860 // | coeffects |
861 // | closure use variables |
862 // | all other locals |
863 // +-----------------------+
865 const Func* func = ar->func();
866 int nlocals = func->numParams();
868 if (ar->func()->hasReifiedGenerics()) {
869 // Currently does not work with closures
870 assertx(!func->isClosureBody());
871 assertx(func->reifiedGenericsLocalId() == nlocals);
872 nlocals++;
875 if (ar->func()->hasCoeffectsLocal()) {
876 assertx(func->coeffectsLocalId() == nlocals);
877 nlocals++;
880 if (UNLIKELY(func->isClosureBody())) {
881 int nuse = c_Closure::initActRecFromClosure(ar, vmStack().top());
882 // initActRecFromClosure doesn't move stack
883 vmStack().nalloc(nuse);
884 nlocals += nuse;
885 func = ar->func();
888 pushFrameSlots(func, nlocals);
890 vmfp() = ar;
891 vmpc() = func->entry() + func->getEntryForNumArgs(numArgsInclUnpack);
892 vmJitReturnAddr() = nullptr;
895 static void dispatch();
897 void enterVMAtFunc(ActRec* enterFnAr, uint32_t numArgsInclUnpack) {
898 assertx(enterFnAr);
899 assertx(!isResumed(enterFnAr));
900 Stats::inc(Stats::VMEnter);
902 prepareFuncEntry(enterFnAr, numArgsInclUnpack);
904 if (
905 !EventHook::FunctionCall(
906 enterFnAr,
907 EventHook::NormalFunc,
908 EventHook::Source::Native
911 return;
913 checkStack(vmStack(), enterFnAr->func(), 0);
914 assertx(vmfp()->func()->contains(vmpc()));
916 if (RID().getJit() && !RID().getJitFolding()) {
917 jit::TCA start = jit::svcreq::getFuncEntry(enterFnAr->func());
918 assert_flog(jit::tc::isValidCodeAddress(start),
919 "start = {} ; func = {} ({})\n",
920 start, enterFnAr->func(), enterFnAr->func()->fullName());
921 jit::enterTC(start);
922 } else {
923 funcEntry();
924 dispatch();
928 void enterVMAtCurPC() {
929 assertx(vmfp());
930 assertx(vmpc());
931 assertx(vmfp()->func()->contains(vmpc()));
932 Stats::inc(Stats::VMEnter);
933 if (RID().getJit()) {
934 jit::enterTC(jit::tc::ustubs().resumeHelper);
935 } else {
936 dispatch();
941 * Helper for function entry, including pseudo-main entry.
943 void pushFrameSlots(const Func* func, int nparams /*= 0*/) {
944 // Push locals.
945 for (int i = nparams; i < func->numLocals(); i++) {
946 vmStack().pushUninit();
948 // Push iterators.
949 for (int i = 0; i < func->numIterators(); i++) {
950 vmStack().allocI();
954 static inline StringData* lookup_name(tv_rval key) {
955 return prepareKey(*key);
958 static inline tv_lval lookup_gbl(ActRec* /*fp*/, StringData*& name,
959 tv_rval key) {
960 name = lookup_name(key);
961 assertx(g_context->m_globalNVTable);
962 return g_context->m_globalNVTable->lookup(name);
965 static inline tv_lval lookupd_gbl(ActRec* /*fp*/, StringData*& name,
966 tv_rval key) {
967 name = lookup_name(key);
968 assertx(g_context->m_globalNVTable);
969 auto env = g_context->m_globalNVTable;
970 auto val = env->lookup(name);
971 if (!val) {
972 TypedValue tv;
973 tvWriteNull(tv);
974 env->set(name, &tv);
975 val = env->lookup(name);
977 return val;
980 static inline void lookup_sprop(ActRec* fp,
981 Class* cls,
982 StringData*& name,
983 TypedValue* key,
984 TypedValue*& val,
985 Slot& slot,
986 bool& visible,
987 bool& accessible,
988 bool& constant,
989 bool& readonly,
990 bool ignoreLateInit) {
991 name = lookup_name(key);
992 auto const ctx = arGetContextClass(fp);
994 auto const lookup = ignoreLateInit
995 ? cls->getSPropIgnoreLateInit(ctx, name)
996 : cls->getSProp(ctx, name);
998 val = lookup.val;
999 slot = lookup.slot;
1000 visible = lookup.val != nullptr;
1001 constant = lookup.constant;
1002 readonly = lookup.readonly;
1003 accessible = lookup.accessible;
1006 static inline Class* lookupClsRef(TypedValue* input) {
1007 Class* class_ = nullptr;
1008 if (isStringType(input->m_type) || isLazyClassType(input->m_type)) {
1009 auto const cname = isStringType(input->m_type) ?
1010 input->m_data.pstr : input->m_data.plazyclass.name();
1011 class_ = Class::load(cname);
1012 if (class_ == nullptr) {
1013 raise_error(Strings::UNKNOWN_CLASS, cname->data());
1015 } else if (input->m_type == KindOfObject) {
1016 class_ = input->m_data.pobj->getVMClass();
1017 } else if (isClassType(input->m_type)) {
1018 class_ = input->m_data.pclass;
1019 } else {
1020 raise_error("Cls: Expected string or object, got %s",
1021 describe_actual_type(input).c_str());
1023 return class_;
1026 static UNUSED int innerCount(TypedValue tv) {
1027 return isRefcountedType(tv.m_type) ? tvGetCount(tv) : -1;
1031 * One iop* function exists for every bytecode. They all take a single PC&
1032 * argument, which should be left pointing to the next bytecode to execute when
1033 * the instruction is complete. Most return void, though a few return a
1034 * jit::TCA. The ones that return a TCA return a non-nullptr value to indicate
1035 * that the caller must resume execution in the TC at the returned
1036 * address. This is used to maintain certain invariants about how we get into
1037 * and out of VM frames in jitted code; see comments on jitReturnPre() for more
1038 * details.
1041 OPTBLD_INLINE void iopNop() {
1044 OPTBLD_INLINE void iopEntryNop() {
1047 OPTBLD_INLINE void iopPopC() {
1048 vmStack().popC();
1051 OPTBLD_INLINE void iopPopU() {
1052 vmStack().popU();
1055 OPTBLD_INLINE void iopPopU2() {
1056 assertx(vmStack().indC(1)->m_type == KindOfUninit);
1057 *vmStack().indC(1) = *vmStack().topC();
1058 vmStack().discard();
1061 OPTBLD_INLINE void iopPopL(tv_lval to) {
1062 TypedValue* fr = vmStack().topC();
1063 tvMove(*fr, to);
1064 vmStack().discard();
1067 OPTBLD_INLINE void iopDup() {
1068 vmStack().dup();
1071 OPTBLD_INLINE void iopCGetCUNop() {
1074 OPTBLD_INLINE void iopUGetCUNop() {
1077 OPTBLD_INLINE void iopNull() {
1078 vmStack().pushNull();
1081 OPTBLD_INLINE void iopNullUninit() {
1082 vmStack().pushNullUninit();
1085 OPTBLD_INLINE void iopTrue() {
1086 vmStack().pushBool(true);
1089 OPTBLD_INLINE void iopFalse() {
1090 vmStack().pushBool(false);
1093 OPTBLD_INLINE void iopFile() {
1094 if (auto const of = vmfp()->func()->originalFilename()) {
1095 vmStack().pushStaticString(of);
1096 return;
1098 auto s = vmfp()->func()->unit()->filepath();
1099 vmStack().pushStaticString(s);
1102 OPTBLD_INLINE void iopDir() {
1103 auto const filepath = vmfp()->func()->unit()->filepath();
1104 vmStack().pushStaticString(
1105 makeStaticString(FileUtil::dirname(StrNR{filepath}))
1109 OPTBLD_INLINE void iopMethod() {
1110 auto s = vmfp()->func()->fullName();
1111 vmStack().pushStaticString(s);
1114 OPTBLD_INLINE void iopFuncCred() {
1115 vmStack().pushObjectNoRc(
1116 FunctionCredential::newInstance(vmfp()->func()));
1119 OPTBLD_INLINE void iopClassName() {
1120 auto const cls = vmStack().topC();
1121 if (!isClassType(cls->m_type)) {
1122 raise_error("Attempting to get name of non-class");
1124 vmStack().replaceC<KindOfPersistentString>(
1125 cls->m_data.pclass->name()
1129 OPTBLD_INLINE void iopLazyClassFromClass() {
1130 auto const cls = vmStack().topC();
1131 if (!isClassType(cls->m_type)) {
1132 raise_error("Attempting to get name of non-class");
1134 auto const cname = cls->m_data.pclass->name();
1135 auto const lclass = LazyClassData::create(cname);
1136 vmStack().replaceC<KindOfLazyClass>(lclass);
1139 OPTBLD_INLINE void iopInt(int64_t imm) {
1140 vmStack().pushInt(imm);
1143 OPTBLD_INLINE void iopDouble(double imm) {
1144 vmStack().pushDouble(imm);
1147 OPTBLD_INLINE void iopString(const StringData* s) {
1148 vmStack().pushStaticString(s);
1151 OPTBLD_INLINE void iopVec(const ArrayData* a) {
1152 assertx(a->isVecType());
1153 vmStack().pushStaticVec(bespoke::maybeMakeLoggingArray(a));
1156 OPTBLD_INLINE void iopDict(const ArrayData* a) {
1157 assertx(a->isDictType());
1158 vmStack().pushStaticDict(bespoke::maybeMakeLoggingArray(a));
1161 OPTBLD_INLINE void iopKeyset(const ArrayData* a) {
1162 assertx(a->isKeysetType());
1163 vmStack().pushStaticKeyset(bespoke::maybeMakeLoggingArray(a));
1166 OPTBLD_INLINE void iopNewDictArray(uint32_t capacity) {
1167 auto const ad = capacity ? VanillaDict::MakeReserveDict(capacity)
1168 : ArrayData::CreateDict();
1169 vmStack().pushDictNoRc(bespoke::maybeMakeLoggingArray(ad));
1172 namespace {
1174 template <typename F>
1175 ArrayData* newStructArrayImpl(imm_array<int32_t> ids, F f) {
1176 auto const n = ids.size;
1177 assertx(n > 0 && n <= ArrayData::MaxElemsOnStack);
1178 req::vector<const StringData*> names;
1179 names.reserve(n);
1180 auto unit = vmfp()->func()->unit();
1181 for (size_t i = 0; i < n; ++i) {
1182 auto name = unit->lookupLitstrId(ids[i]);
1183 names.push_back(name);
1186 // This constructor moves values, no inc/decref is necessary.
1187 auto const a = f(n, names.data(), vmStack().topC())->asArrayData();
1188 vmStack().ndiscard(n);
1189 return a;
1194 OPTBLD_INLINE void iopNewStructDict(imm_array<int32_t> ids) {
1195 auto const ad = newStructArrayImpl(ids, VanillaDict::MakeStructDict);
1196 vmStack().pushDictNoRc(bespoke::maybeMakeLoggingArray(ad));
1199 OPTBLD_INLINE void iopNewVec(uint32_t n) {
1200 // This constructor moves values, no inc/decref is necessary.
1201 auto const ad = VanillaVec::MakeVec(n, vmStack().topC());
1202 vmStack().ndiscard(n);
1203 vmStack().pushVecNoRc(bespoke::maybeMakeLoggingArray(ad));
1206 OPTBLD_INLINE void iopNewKeysetArray(uint32_t n) {
1207 // This constructor moves values, no inc/decref is necessary.
1208 auto const ad = VanillaKeyset::MakeSet(n, vmStack().topC());
1209 vmStack().ndiscard(n);
1210 vmStack().pushKeysetNoRc(bespoke::maybeMakeLoggingArray(ad));
1213 OPTBLD_INLINE void iopAddElemC() {
1214 TypedValue* c1 = vmStack().topC();
1215 auto key = tvClassToString(*vmStack().indC(1));
1216 TypedValue* c3 = vmStack().indC(2);
1217 if (!tvIsDict(c3)) {
1218 raise_error("AddElemC: $3 must be an array or dict");
1220 tvAsVariant(*c3).asArrRef().set(tvAsCVarRef(key), tvAsCVarRef(c1));
1221 assertx(tvIsPlausible(*c3));
1222 vmStack().popC();
1223 vmStack().popC();
1226 OPTBLD_INLINE void iopAddNewElemC() {
1227 TypedValue* c1 = vmStack().topC();
1228 TypedValue* c2 = vmStack().indC(1);
1229 if (!tvIsVec(c2) && !tvIsKeyset(c2)) {
1230 raise_error("AddNewElemC: $2 must be an varray, vec, or keyset");
1232 tvAsVariant(*c2).asArrRef().append(tvAsCVarRef(c1));
1233 assertx(tvIsPlausible(*c2));
1234 vmStack().popC();
1237 OPTBLD_INLINE void iopNewCol(CollectionType cType) {
1238 assertx(cType != CollectionType::Pair);
1239 // Incref the collection object during construction.
1240 auto obj = collections::alloc(cType);
1241 vmStack().pushObjectNoRc(obj);
1244 OPTBLD_INLINE void iopNewPair() {
1245 TypedValue* c1 = vmStack().topC();
1246 TypedValue* c2 = vmStack().indC(1);
1247 // elements were pushed onto the stack in the order they should appear
1248 // in the pair, so the top of the stack should become the second element
1249 auto pair = collections::allocPair(*c2, *c1);
1250 // This constructor moves values, no inc/decref is necessary.
1251 vmStack().ndiscard(2);
1252 vmStack().pushObjectNoRc(pair);
1255 OPTBLD_INLINE void iopColFromArray(CollectionType cType) {
1256 assertx(cType != CollectionType::Pair);
1257 auto const c1 = vmStack().topC();
1258 if (cType == CollectionType::Vector || cType == CollectionType::ImmVector) {
1259 if (UNLIKELY(!isVecType(c1->m_type))) {
1260 raise_error("ColFromArray: $1 must be a Vec when creating an "
1261 "(Imm)Vector");
1263 } else if (UNLIKELY(!isDictType(c1->m_type))) {
1264 raise_error("ColFromArray: $1 must be a Dict when creating an (Imm)Set "
1265 "or an (Imm)Map");
1267 // This constructor reassociates the ArrayData with the collection, so no
1268 // inc/decref is needed for the array. The collection object itself is
1269 // increfed.
1270 auto obj = collections::alloc(cType, c1->m_data.parr);
1271 vmStack().discard();
1272 vmStack().pushObjectNoRc(obj);
1275 OPTBLD_INLINE void iopCnsE(const StringData* s) {
1276 auto const cns = Constant::load(s);
1277 if (type(cns) == KindOfUninit) {
1278 raise_error("Undefined constant '%s'", s->data());
1280 auto const c1 = vmStack().allocC();
1281 tvCopy(cns, *c1);
1284 OPTBLD_INLINE void iopClsCns(const StringData* clsCnsName) {
1285 auto const clsTV = vmStack().topC();
1286 if (!isClassType(clsTV->m_type)) {
1287 raise_error("Attempting class constant access on non-class");
1290 auto const cls = clsTV->m_data.pclass;
1291 auto const clsCns = cls->clsCnsGet(clsCnsName);
1293 if (clsCns.m_type == KindOfUninit) {
1294 raise_error("Couldn't find constant %s::%s",
1295 cls->name()->data(), clsCnsName->data());
1298 tvDup(clsCns, *clsTV);
1301 OPTBLD_INLINE void iopClsCnsD(const StringData* clsCnsName, Id classId) {
1302 const NamedEntityPair& classNamedEntity =
1303 vmfp()->func()->unit()->lookupNamedEntityPairId(classId);
1304 auto const clsCns = g_context->lookupClsCns(classNamedEntity.second,
1305 classNamedEntity.first, clsCnsName);
1306 auto const c1 = vmStack().allocC();
1307 tvDup(clsCns, *c1);
1310 OPTBLD_INLINE void iopClsCnsL(tv_lval local) {
1311 auto const clsTV = vmStack().topC();
1312 if (!isClassType(clsTV->m_type)) {
1313 raise_error("Attempting class constant access on non-class");
1315 auto const cls = clsTV->m_data.pclass;
1316 if (!isStringType(type(local))) {
1317 raise_error("String expected for %s constant", cls->name()->data());
1319 auto const clsCnsName = val(local).pstr;
1320 auto const clsCns = cls->clsCnsGet(clsCnsName);
1321 if (clsCns.m_type == KindOfUninit) {
1322 raise_error("Couldn't find constant %s::%s",
1323 cls->name()->data(), clsCnsName->data());
1325 tvSet(clsCns, *clsTV);
1328 String toStringWithNotice(const Variant& c) {
1329 static ConvNoticeLevel notice_level =
1330 flagToConvNoticeLevel(RuntimeOption::EvalNoticeOnCoerceForStrConcat);
1331 return c.toString(notice_level, s_ConvNoticeReasonConcat.get());
1334 OPTBLD_INLINE void iopConcat() {
1335 auto const c1 = vmStack().topC();
1336 auto const c2 = vmStack().indC(1);
1337 auto const s2 = toStringWithNotice(tvAsVariant(*c2));
1338 auto const s1 = toStringWithNotice(tvAsCVarRef(*c1));
1339 tvAsVariant(*c2) = concat(s2, s1);
1340 assertx(c2->m_data.pstr->checkCount());
1341 vmStack().popC();
1344 OPTBLD_INLINE void iopConcatN(uint32_t n) {
1345 auto const c1 = vmStack().topC();
1346 auto const c2 = vmStack().indC(1);
1347 auto const s1 = toStringWithNotice(tvAsCVarRef(*c1));
1349 if (n == 2) {
1350 auto const s2 = toStringWithNotice(tvAsVariant(*c2));
1351 tvAsVariant(*c2) = concat(s2, s1);
1352 assertx(c2->m_data.pstr->checkCount());
1353 } else if (n == 3) {
1354 auto const c3 = vmStack().indC(2);
1355 auto const s3 = toStringWithNotice(tvAsVariant(*c3));
1356 auto const s2 = toStringWithNotice(tvAsCVarRef(*c2));
1357 tvAsVariant(*c3) = concat3(s3, s2, s1);
1358 assertx(c3->m_data.pstr->checkCount());
1359 } else {
1360 assertx(n == 4);
1361 auto const c3 = vmStack().indC(2);
1362 auto const c4 = vmStack().indC(3);
1363 auto const s4 = toStringWithNotice(tvAsVariant(*c4));
1364 auto const s3 = toStringWithNotice(tvAsCVarRef(*c3));
1365 auto const s2 = toStringWithNotice(tvAsCVarRef(*c2));
1366 tvAsVariant(*c4) = concat4(s4, s3, s2, s1);
1367 assertx(c4->m_data.pstr->checkCount());
1370 for (int i = 1; i < n; ++i) {
1371 vmStack().popC();
1375 OPTBLD_INLINE void iopNot() {
1376 TypedValue* c1 = vmStack().topC();
1377 tvAsVariant(*c1) = !tvAsVariant(*c1).toBoolean();
1380 template<class Fn>
1381 OPTBLD_INLINE void implTvBinOp(Fn fn) {
1382 auto const c1 = vmStack().topC();
1383 auto const c2 = vmStack().indC(1);
1384 auto const result = fn(*c2, *c1);
1385 tvDecRefGen(c2);
1386 *c2 = result;
1387 vmStack().popC();
1390 template<class Fn>
1391 OPTBLD_INLINE void implTvBinOpBool(Fn fn) {
1392 auto const c1 = vmStack().topC();
1393 auto const c2 = vmStack().indC(1);
1394 bool const result = fn(*c2, *c1);
1395 tvDecRefGen(c2);
1396 *c2 = make_tv<KindOfBoolean>(result);
1397 vmStack().popC();
1400 template<class Fn>
1401 OPTBLD_INLINE void implTvBinOpInt64(Fn fn) {
1402 auto const c1 = vmStack().topC();
1403 auto const c2 = vmStack().indC(1);
1404 auto const result = fn(*c2, *c1);
1405 tvDecRefGen(c2);
1406 *c2 = make_tv<KindOfInt64>(result);
1407 vmStack().popC();
1410 OPTBLD_INLINE void iopAdd() {
1411 implTvBinOp(tvAdd);
1414 OPTBLD_INLINE void iopSub() {
1415 implTvBinOp(tvSub);
1418 OPTBLD_INLINE void iopMul() {
1419 implTvBinOp(tvMul);
1422 OPTBLD_INLINE void iopAddO() {
1423 implTvBinOp(tvAddO);
1426 OPTBLD_INLINE void iopSubO() {
1427 implTvBinOp(tvSubO);
1430 OPTBLD_INLINE void iopMulO() {
1431 implTvBinOp(tvMulO);
1434 OPTBLD_INLINE void iopDiv() {
1435 implTvBinOp(tvDiv);
1438 OPTBLD_INLINE void iopPow() {
1439 implTvBinOp(tvPow);
1442 OPTBLD_INLINE void iopMod() {
1443 implTvBinOp(tvMod);
1446 OPTBLD_INLINE void iopBitAnd() {
1447 implTvBinOp(tvBitAnd);
1450 OPTBLD_INLINE void iopBitOr() {
1451 implTvBinOp(tvBitOr);
1454 OPTBLD_INLINE void iopBitXor() {
1455 implTvBinOp(tvBitXor);
1458 OPTBLD_INLINE void iopSame() {
1459 implTvBinOpBool(tvSame);
1462 OPTBLD_INLINE void iopNSame() {
1463 implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
1464 return !tvSame(c1, c2);
1468 OPTBLD_INLINE void iopEq() {
1469 implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
1470 return tvEqual(c1, c2);
1474 OPTBLD_INLINE void iopNeq() {
1475 implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
1476 return !tvEqual(c1, c2);
1480 OPTBLD_INLINE void iopLt() {
1481 implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
1482 return tvLess(c1, c2);
1486 OPTBLD_INLINE void iopLte() {
1487 implTvBinOpBool(tvLessOrEqual);
1490 OPTBLD_INLINE void iopGt() {
1491 implTvBinOpBool([&] (TypedValue c1, TypedValue c2) {
1492 return tvGreater(c1, c2);
1496 OPTBLD_INLINE void iopGte() {
1497 implTvBinOpBool(tvGreaterOrEqual);
1500 OPTBLD_INLINE void iopCmp() {
1501 implTvBinOpInt64([&] (TypedValue c1, TypedValue c2) {
1502 return tvCompare(c1, c2);
1506 OPTBLD_INLINE void iopShl() {
1507 implTvBinOp(tvShl);
1510 OPTBLD_INLINE void iopShr() {
1511 implTvBinOp(tvShr);
1514 OPTBLD_INLINE void iopBitNot() {
1515 tvBitNot(*vmStack().topC());
1518 OPTBLD_INLINE void iopCastBool() {
1519 TypedValue* c1 = vmStack().topC();
1520 tvCastToBooleanInPlace(c1);
1523 OPTBLD_INLINE void iopCastInt() {
1524 TypedValue* c1 = vmStack().topC();
1525 tvCastToInt64InPlace(c1);
1528 OPTBLD_INLINE void iopCastDouble() {
1529 TypedValue* c1 = vmStack().topC();
1530 tvCastToDoubleInPlace(c1);
1533 OPTBLD_INLINE void iopCastString() {
1534 TypedValue* c1 = vmStack().topC();
1535 tvCastToStringInPlace(c1);
1538 namespace {
1539 void maybeMakeLoggingArrayAfterCast(TypedValue* tv) {
1540 auto const oldArr = val(tv).parr;
1541 auto const newArr = bespoke::maybeMakeLoggingArray(oldArr);
1542 if (newArr == oldArr) return;
1543 val(tv).parr = newArr;
1544 type(tv) = dt_with_rc(type(tv));
1548 OPTBLD_INLINE void iopCastVec() {
1549 TypedValue* c1 = vmStack().topC();
1550 if (tvIsVec(c1)) return;
1551 tvCastToVecInPlace(c1);
1552 maybeMakeLoggingArrayAfterCast(c1);
1555 OPTBLD_INLINE void iopCastDict() {
1556 TypedValue* c1 = vmStack().topC();
1557 if (tvIsDict(c1)) return;
1558 tvCastToDictInPlace(c1);
1559 maybeMakeLoggingArrayAfterCast(c1);
1562 OPTBLD_INLINE void iopCastKeyset() {
1563 TypedValue* c1 = vmStack().topC();
1564 if (tvIsKeyset(c1)) return;
1565 tvCastToKeysetInPlace(c1);
1566 maybeMakeLoggingArrayAfterCast(c1);
1569 OPTBLD_INLINE void iopDblAsBits() {
1570 auto c = vmStack().topC();
1571 if (UNLIKELY(!isDoubleType(c->m_type))) {
1572 vmStack().replaceC<KindOfInt64>(0);
1573 return;
1575 c->m_type = KindOfInt64;
1578 ALWAYS_INLINE
1579 bool implInstanceOfHelper(const StringData* str1, TypedValue* c2) {
1580 const NamedEntity* rhs = NamedEntity::get(str1, false);
1581 // Because of other codepaths, an un-normalized name might enter the
1582 // table without a Class* so we need to check if it's there.
1583 if (LIKELY(rhs && rhs->getCachedClass() != nullptr)) {
1584 return tvInstanceOf(c2, rhs);
1586 return false;
1589 OPTBLD_INLINE void iopInstanceOf() {
1590 TypedValue* c1 = vmStack().topC(); // c2 instanceof c1
1591 TypedValue* c2 = vmStack().indC(1);
1592 bool r = false;
1593 if (isStringType(c1->m_type)) {
1594 r = implInstanceOfHelper(c1->m_data.pstr, c2);
1595 } else if (c1->m_type == KindOfObject) {
1596 if (c2->m_type == KindOfObject) {
1597 ObjectData* lhs = c2->m_data.pobj;
1598 ObjectData* rhs = c1->m_data.pobj;
1599 r = lhs->instanceof(rhs->getVMClass());
1601 } else if (isClassType(c1->m_type)) {
1602 // TODO (T29639296) Exploit class pointer further
1603 r = implInstanceOfHelper(c1->m_data.pclass->name(), c2);
1604 } else {
1605 raise_error("Class name must be a valid object or a string");
1607 vmStack().popC();
1608 vmStack().replaceC<KindOfBoolean>(r);
1611 OPTBLD_INLINE void iopInstanceOfD(Id id) {
1612 const NamedEntity* ne = vmfp()->func()->unit()->lookupNamedEntityId(id);
1613 TypedValue* c1 = vmStack().topC();
1614 bool r = tvInstanceOf(c1, ne);
1615 vmStack().replaceC<KindOfBoolean>(r);
1618 OPTBLD_INLINE void iopIsLateBoundCls() {
1619 auto const cls = frameStaticClass(vmfp());
1620 if (!cls) {
1621 raise_error(HPHP::Strings::THIS_OUTSIDE_CLASS);
1623 if (isTrait(cls)) {
1624 raise_error("\"is\" and \"as\" operators cannot be used with a trait");
1626 auto const c1 = vmStack().topC();
1627 bool r = tvInstanceOf(c1, cls);
1628 vmStack().replaceC<KindOfBoolean>(r);
1631 namespace {
1633 ArrayData* resolveAndVerifyTypeStructureHelper(
1634 uint32_t n, const TypedValue* values, bool suppress, bool isOrAsOp) {
1635 Class* declaringCls = nullptr;
1636 Class* calledCls = nullptr;
1637 auto const v = *values;
1638 isValidTSType(v, true);
1639 if (typeStructureCouldBeNonStatic(v.m_data.parr)) {
1640 auto const frame = vmfp();
1641 if (frame && frame->func()) {
1642 declaringCls = frame->func()->cls();
1643 if (declaringCls) {
1644 calledCls = frame->hasClass()
1645 ? frame->getClass()
1646 : frame->getThis()->getVMClass();
1650 return jit::resolveTypeStructHelper(n, values, declaringCls,
1651 calledCls, suppress, isOrAsOp);
1654 ALWAYS_INLINE Array maybeResolveAndErrorOnTypeStructure(
1655 TypeStructResolveOp op,
1656 bool suppress
1658 auto const a = vmStack().topC();
1659 isValidTSType(*a, true);
1660 auto const arr = a->m_data.parr;
1662 if (op == TypeStructResolveOp::Resolve) {
1663 auto const result = resolveAndVerifyTypeStructureHelper(1, vmStack().topC(),
1664 suppress, true);
1665 if (arr == result) return ArrNR(arr);
1666 return Array::attach(result);
1669 errorOnIsAsExpressionInvalidTypes(ArrNR(arr), false);
1670 return ArrNR(arr);
1673 } // namespace
1675 OPTBLD_INLINE void iopIsTypeStructC(TypeStructResolveOp op) {
1676 auto const c = vmStack().indC(1);
1677 auto const ts = maybeResolveAndErrorOnTypeStructure(op, true);
1678 auto b = checkTypeStructureMatchesTV(ts, *c);
1679 vmStack().popC(); // pop c
1680 vmStack().replaceC<KindOfBoolean>(b);
1683 OPTBLD_INLINE void iopThrowAsTypeStructException() {
1684 auto const c = vmStack().indC(1);
1685 auto const ts =
1686 maybeResolveAndErrorOnTypeStructure(TypeStructResolveOp::Resolve, false);
1687 std::string givenType, expectedType, errorKey;
1688 if (!checkTypeStructureMatchesTV(ts, *c,
1689 givenType, expectedType, errorKey)) {
1690 vmStack().popC(); // pop c
1691 throwTypeStructureDoesNotMatchTVException(
1692 givenType, expectedType, errorKey);
1694 always_assert(false && "Invalid bytecode sequence: Instruction must throw");
1697 OPTBLD_INLINE void iopCombineAndResolveTypeStruct(uint32_t n) {
1698 assertx(n != 0);
1699 auto const resolved =
1700 resolveAndVerifyTypeStructureHelper(n, vmStack().topC(), false, false);
1701 vmStack().popC(); // pop the first TS
1702 vmStack().ndiscard(n-1);
1703 vmStack().pushArrayLike(resolved);
1706 OPTBLD_INLINE void iopRecordReifiedGeneric() {
1707 auto const tsList = vmStack().topC();
1708 if (!tvIsVec(tsList)) {
1709 raise_error("Invalid type-structure list in RecordReifiedGeneric");
1711 // recordReifiedGenericsAndGetTSList decrefs the tsList
1712 auto const result =
1713 jit::recordReifiedGenericsAndGetTSList(tsList->m_data.parr);
1714 vmStack().discard();
1715 vmStack().pushStaticArrayLike(result);
1718 OPTBLD_INLINE void iopCheckReifiedGenericMismatch() {
1719 Class* cls = arGetContextClass(vmfp());
1720 if (!cls) raise_error("No class scope is active");
1721 auto const c = vmStack().topC();
1722 if (!tvIsVec(c)) {
1723 raise_error("Invalid type-structure list in CheckReifiedGenericMismatch");
1725 checkClassReifiedGenericMismatch(cls, c->m_data.parr);
1726 vmStack().popC();
1729 OPTBLD_INLINE void iopPrint() {
1730 TypedValue* c1 = vmStack().topC();
1731 g_context->write(tvAsVariant(*c1).toString());
1732 vmStack().replaceC<KindOfInt64>(1);
1735 OPTBLD_INLINE void iopClone() {
1736 TypedValue* tv = vmStack().topTV();
1737 if (tv->m_type != KindOfObject) {
1738 raise_error("clone called on non-object");
1740 auto newobj = tv->m_data.pobj->clone();
1741 vmStack().popTV();
1742 vmStack().pushObjectNoRc(newobj);
1745 OPTBLD_INLINE void iopExit() {
1746 int exitCode = 0;
1747 TypedValue* c1 = vmStack().topC();
1748 if (c1->m_type == KindOfInt64) {
1749 exitCode = c1->m_data.num;
1750 } else {
1751 g_context->write(tvAsVariant(*c1).toString());
1753 vmStack().popC();
1754 vmStack().pushNull();
1755 throw ExitException(exitCode);
1758 OPTBLD_INLINE void iopFatal(FatalOp kind_char) {
1759 TypedValue* top = vmStack().topTV();
1760 std::string msg;
1761 if (isStringType(top->m_type)) {
1762 msg = top->m_data.pstr->data();
1763 } else {
1764 msg = "Fatal error message not a string";
1766 vmStack().popTV();
1768 switch (kind_char) {
1769 case FatalOp::RuntimeOmitFrame:
1770 raise_error_without_first_frame(msg);
1771 break;
1772 case FatalOp::Runtime:
1773 case FatalOp::Parse:
1774 raise_error(msg);
1775 break;
1779 OPTBLD_INLINE void jmpSurpriseCheck(Offset offset) {
1780 if (offset <= 0 && UNLIKELY(checkSurpriseFlags())) {
1781 auto const flags = handle_request_surprise();
1783 // Memory Threhsold callback should also be fired here
1784 if (flags & MemThresholdFlag) {
1785 EventHook::DoMemoryThresholdCallback();
1787 if (flags & TimedOutFlag) {
1788 RID().invokeUserTimeoutCallback();
1793 OPTBLD_INLINE void iopJmp(PC& pc, PC targetpc) {
1794 jmpSurpriseCheck(targetpc - pc);
1795 pc = targetpc;
1798 OPTBLD_INLINE void iopJmpNS(PC& pc, PC targetpc) {
1799 pc = targetpc;
1802 template<Op op>
1803 OPTBLD_INLINE void jmpOpImpl(PC& pc, PC targetpc) {
1804 static_assert(op == OpJmpZ || op == OpJmpNZ,
1805 "jmpOpImpl should only be used by JmpZ and JmpNZ");
1806 jmpSurpriseCheck(targetpc - pc);
1808 TypedValue* c1 = vmStack().topC();
1809 if (c1->m_type == KindOfInt64 || c1->m_type == KindOfBoolean) {
1810 int64_t n = c1->m_data.num;
1811 vmStack().popX();
1812 if (op == OpJmpZ ? n == 0 : n != 0) pc = targetpc;
1813 } else {
1814 auto const cond = tvAsCVarRef(*c1).toBoolean();
1815 vmStack().popC();
1816 if (op == OpJmpZ ? !cond : cond) pc = targetpc;
1820 OPTBLD_INLINE void iopJmpZ(PC& pc, PC targetpc) {
1821 jmpOpImpl<OpJmpZ>(pc, targetpc);
1824 OPTBLD_INLINE void iopJmpNZ(PC& pc, PC targetpc) {
1825 jmpOpImpl<OpJmpNZ>(pc, targetpc);
1828 OPTBLD_INLINE void iopSelect() {
1829 auto const cond = [&]{
1830 auto c = vmStack().topC();
1831 if (c->m_type == KindOfInt64 || c->m_type == KindOfBoolean) {
1832 auto const val = (bool)c->m_data.num;
1833 vmStack().popX();
1834 return val;
1835 } else {
1836 auto const val = tvAsCVarRef(*c).toBoolean();
1837 vmStack().popC();
1838 return val;
1840 }();
1842 if (cond) {
1843 auto const t = *vmStack().topC();
1844 vmStack().discard();
1845 vmStack().replaceC(t);
1846 } else {
1847 vmStack().popC();
1851 OPTBLD_INLINE
1852 void iopSwitch(PC origpc, PC& pc, SwitchKind kind, int64_t base,
1853 imm_array<Offset> jmptab) {
1854 auto const veclen = jmptab.size;
1855 assertx(veclen > 0);
1856 TypedValue* val = vmStack().topTV();
1857 if (kind == SwitchKind::Unbounded) {
1858 assertx(val->m_type == KindOfInt64);
1859 // Continuation switch: no bounds checking needed
1860 int64_t label = val->m_data.num;
1861 vmStack().popX();
1862 assertx(label >= 0 && label < veclen);
1863 pc = origpc + jmptab[label];
1864 return;
1867 const auto num = val->m_data.num;
1868 const auto offset =
1869 !tvIsInt(val) || num < base || num >= (base + veclen - 2)
1870 ? veclen - 1
1871 : num - base;
1873 pc = origpc + jmptab[offset];
1874 vmStack().discard();
1877 OPTBLD_INLINE
1878 void iopSSwitch(PC origpc, PC& pc, imm_array<StrVecItem> jmptab) {
1879 auto const veclen = jmptab.size;
1880 assertx(veclen > 1);
1881 TypedValue* val = vmStack().topTV();
1883 if (tvIsString(val) || tvIsClass(val) || tvIsLazyClass(val)) {
1884 unsigned cases = veclen - 1; // the last vector item is the default case
1885 Unit* u = vmfp()->func()->unit();
1886 for (unsigned i = 0; i < cases; ++i) {
1887 auto item = jmptab[i];
1888 const StringData* str = u->lookupLitstrId(item.str);
1889 if (tvEqual(*val, str)) {
1890 pc = origpc + item.dest;
1891 vmStack().popC();
1892 return;
1897 // default case
1898 pc = origpc + jmptab[veclen - 1].dest;
1899 vmStack().popC();
1903 * jitReturnPre and jitReturnPost are used by RetC/V, CreateCont, NativeImpl,
1904 * Yield, and YieldK to perform a few tasks related to interpreting out of a
1905 * frame:
1907 * - If the current frame was entered in the TC and the jit is now off, we
1908 * throw a VMSwitchMode at the beginning of the bytecode to execute the
1909 * call's catch block (if present) before performing the return.
1910 * - If the current frame was entered in the TC and the jit is still on,
1911 * we wait until the end of the bytecode and throw a VMResumeTC, to return to
1912 * our translated caller rather than interpreting back into it.
1913 * - If the current frame was entered by the interpreter but was active when
1914 * the jit called MCGenerator::handleResume() (meaning it's the saved value
1915 * of %rbp in handleResume()'s stack frame), throw a VMResumeTC to reenter
1916 * handleResume(). This is necessary to update the value of %rbp in the TC
1917 * frame, so the unwinder doesn't read from a dead VM frame if something
1918 * throws from the interpreter later on.
1920 namespace {
1921 struct JitReturn {
1922 uint64_t savedRip;
1923 ActRec* fp;
1924 ActRec* sfp;
1925 Offset callOff;
1928 OPTBLD_INLINE JitReturn jitReturnPre(ActRec* fp) {
1929 auto savedRip = fp->m_savedRip;
1930 auto const isRetHelper = isReturnHelper(savedRip);
1931 if (isRetHelper) {
1932 // This frame was called from the interpreter, so it's ok to also return
1933 // using the interpreter.
1934 savedRip = 0;
1936 assertx(isRetHelper || isCallToExit(savedRip) || RID().getJit());
1938 return {savedRip, fp, fp->sfp(), fp->callOffset()};
1941 OPTBLD_INLINE TCA jitReturnPost(JitReturn retInfo) {
1942 if (retInfo.savedRip) {
1943 // This frame was called by translated code so we can't interpret out of
1944 // it. Resume in the TC right after our caller. This situation most
1945 // commonly happens when we interpOne a RetC due to having a VarEnv or some
1946 // other weird case.
1947 return TCA(retInfo.savedRip);
1950 if (!retInfo.sfp) {
1951 // If we don't have an sfp, we're returning from the first frame in this VM
1952 // nesting level. The vmJitCalledFrame() check below is only important if
1953 // we might throw before returning to the TC, which is guaranteed to not
1954 // happen in this situation.
1955 assertx(vmfp() == nullptr);
1956 return nullptr;
1960 // Consider a situation with a PHP function f() that calls another function
1961 // g(). If the call is interpreted, then we spend some time in the TC inside
1962 // g(), then eventually end in dispatchBB() (called by
1963 // MCGenerator::handleResume()) for g()'s RetC, the logic here kicks in.
1965 // g()'s VM frame was in %rbp when the TC called handleResume(), so it's
1966 // saved somewhere in handleResume()'s stack frame. If we return out of that
1967 // frame and keep going in the interpreter, that saved %rbp will be pointing
1968 // to a garbage VM frame. This is a problem if something needs to throw an
1969 // exception up through handleResume() and the TC frames above it, since the
1970 // C++ unwinder will attempt to treat parts of the popped VM frame as
1971 // pointers and segfault.
1973 // To avoid running with this dangling saved %rbp a few frames up, we
1974 // immediately throw an exception that is "caught" by the TC frame that
1975 // called handleResume(). We resume execution in the TC which reloads the new
1976 // vmfp() into %rbp, then handleResume() is called again, this time with a
1977 // live VM frame in %rbp.
1978 if (vmJitCalledFrame() == retInfo.fp) {
1979 FTRACE(1, "Returning from frame {}; resuming", vmJitCalledFrame());
1980 return jit::tc::ustubs().resumeHelper;
1983 return nullptr;
1986 OPTBLD_INLINE void returnToCaller(PC& pc, ActRec* sfp, Offset callOff) {
1987 vmfp() = sfp;
1988 pc = LIKELY(sfp != nullptr)
1989 ? skipCall(sfp->func()->entry() + callOff)
1990 : nullptr;
1995 template <bool suspended>
1996 OPTBLD_INLINE TCA ret(PC& pc) {
1997 assertx(!suspended || vmfp()->func()->isAsyncFunction());
1998 assertx(!suspended || !isResumed(vmfp()));
2000 // Grab info from callee's ActRec.
2001 auto const fp = vmfp();
2002 auto const func = fp->func();
2003 auto const sfp = fp->sfp();
2004 auto const jitReturn = jitReturnPre(fp);
2006 // Get the return value.
2007 TypedValue retval = *vmStack().topTV();
2008 vmStack().discard();
2010 assertx(
2011 !suspended || (tvIsObject(retval) && retval.m_data.pobj->isWaitHandle())
2014 // Free $this and local variables. Calls FunctionReturn hook. The return
2015 // value must be removed from the stack, or the unwinder would try to free it
2016 // if the hook throws---but the event hook routine decrefs the return value
2017 // in that case if necessary.
2018 // in that case if necessary.
2019 fp->setLocalsDecRefd();
2020 frame_free_locals_inl(
2022 func->numLocals(),
2023 &retval,
2024 EventHook::Source::Interpreter
2027 if (LIKELY(!isResumed(fp))) {
2028 // If in an eagerly executed async function, wrap the return value into
2029 // succeeded StaticWaitHandle. Async eager return requests are currently
2030 // not respected, as we don't have a way to obtain the async eager offset.
2031 if (UNLIKELY(func->isAsyncFunction()) && !suspended) {
2032 auto const& retvalCell = *tvAssertPlausible(&retval);
2033 // Heads up that we're assuming CreateSucceeded can't throw, or we won't
2034 // decref the return value. (It can't right now.)
2035 auto const waitHandle = c_StaticWaitHandle::CreateSucceeded(retvalCell);
2036 tvCopy(make_tv<KindOfObject>(waitHandle), retval);
2039 // Free ActRec and store the return value.
2040 vmStack().ndiscard(func->numSlotsInFrame());
2041 vmStack().ret();
2042 *vmStack().topTV() = retval;
2043 assertx(vmStack().topTV() == fp->retSlot());
2044 // In case async eager return was requested by the caller, pretend that
2045 // we did not finish eagerly as we already boxed the value.
2046 vmStack().topTV()->m_aux.u_asyncEagerReturnFlag = 0;
2047 } else if (func->isAsyncFunction()) {
2048 // Mark the async function as succeeded and store the return value.
2049 assertx(!sfp);
2050 auto wh = frame_afwh(fp);
2051 wh->ret(retval);
2052 decRefObj(wh);
2053 } else if (func->isAsyncGenerator()) {
2054 // Mark the async generator as finished.
2055 assertx(isNullType(retval.m_type));
2056 auto const gen = frame_async_generator(fp);
2057 auto const eagerResult = gen->ret();
2058 if (eagerResult) {
2059 // Eager execution => return StaticWaitHandle.
2060 assertx(sfp);
2061 vmStack().pushObjectNoRc(eagerResult);
2062 } else {
2063 // Resumed execution => return control to the scheduler.
2064 assertx(!sfp);
2066 } else if (func->isNonAsyncGenerator()) {
2067 // Mark the generator as finished and store the return value.
2068 frame_generator(fp)->ret(retval);
2070 // Push return value of next()/send()/raise().
2071 vmStack().pushNull();
2072 } else {
2073 not_reached();
2076 // Return control to the caller.
2077 returnToCaller(pc, sfp, jitReturn.callOff);
2079 return jitReturnPost(jitReturn);
// RetC: return from the current function, taking the single return value
// from the top of the stack; ret<false>() performs the frame teardown.
2082 OPTBLD_INLINE TCA iopRetC(PC& pc) {
2083 return ret<false>(pc);
// RetCSuspended: return from an eagerly-executed async function without
// wrapping the value in a StaticWaitHandle (ret<true>'s `suspended` path).
// Only valid in a non-resumed async function frame, hence the asserts.
2086 OPTBLD_INLINE TCA iopRetCSuspended(PC& pc) {
2087 assertx(vmfp()->func()->isAsyncFunction());
2088 assertx(!isResumed(vmfp()));
2089 return ret<true>(pc);
// RetM: return `numRet` values from the current (non-async, non-generator)
// function. Values are collected from the stack, the frame is freed, and the
// values are re-laid-out in the caller's expected order.
2092 OPTBLD_INLINE TCA iopRetM(PC& pc, uint32_t numRet) {
2093 auto const jitReturn = jitReturnPre(vmfp());
// Copy the return values off the eval stack; indC(i) counts down from the
// deepest value, so retvals[0] ends up holding the topmost (actual) value.
2095 req::vector<TypedValue> retvals;
2096 retvals.reserve(numRet);
2098 for (int i = numRet - 1; i >= 0; i--) {
2099 retvals.push_back(*vmStack().indC(i));
2102 vmStack().ndiscard(numRet);
2104 // Free $this and local variables. Calls FunctionReturn hook. The return
2105 // value must be removed from the stack, or the unwinder would try to free it
2106 // if the hook throws---but the event hook routine decrefs the return value
2107 // in that case if necessary.
2108 frame_free_locals_inl(
2109 vmfp(),
2110 vmfp()->func()->numLocals(),
2111 &retvals[0],
2112 EventHook::Source::Interpreter
// RetM never applies to generators or async functions.
2115 assertx(!vmfp()->func()->isGenerator() && !vmfp()->func()->isAsync());
2117 // Grab caller info from ActRec.
2118 ActRec* sfp = vmfp()->sfp();
2119 Offset callOff = vmfp()->callOffset();
2121 // Free ActRec and store the return value.
2122 vmStack().ndiscard(vmfp()->func()->numSlotsInFrame());
2123 vmStack().ret();
2125 // Discard scratch space for return values allocated for multi return FCall
2126 vmStack().ndiscard(numRet - 1);
// Write back the extra return values (retvals[1..numRet-1]) first...
2127 *vmStack().topTV() = retvals[1];
2129 for (int i = 2; i < numRet; i++) {
2130 *vmStack().allocTV() = retvals[i];
2133 // Store the actual return value at the top of the stack
2134 *vmStack().allocTV() = retvals[0];
2136 // Return control to the caller.
2137 returnToCaller(pc, sfp, callOff);
2139 return jitReturnPost(jitReturn);
// Throw: pop the object on the stack top and raise it as a C++-level
// exception (req::root<Object>), after verifying it implements Throwable.
2142 OPTBLD_INLINE void iopThrow(PC&) {
2143 TypedValue* c1 = vmStack().topC();
2144 if (c1->m_type != KindOfObject ||
2145 !c1->m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
2146 raise_error("Exceptions must implement the Throwable interface.");
// Attach (not inc-ref) then discard: ownership of the refcount moves from
// the stack slot into `obj`.
2148 auto obj = Object::attach(c1->m_data.pobj);
2149 vmStack().discard();
2150 DEBUGGER_ATTACHED_ONLY(phpDebuggerExceptionThrownHook(obj.get()));
2151 throw req::root<Object>(std::move(obj));
// ThrowNonExhaustiveSwitch: runtime error for a switch that matched no arm.
2154 OPTBLD_INLINE void iopThrowNonExhaustiveSwitch() {
2155 SystemLib::throwRuntimeExceptionObject(String(Strings::NONEXHAUSTIVE_SWITCH));
// RaiseClassStringConversionWarning: notice for implicit class-to-string
// conversion, gated on the corresponding runtime option.
2158 OPTBLD_INLINE void iopRaiseClassStringConversionWarning() {
2159 if (RuntimeOption::EvalRaiseClassConversionWarning) {
2160 raise_class_to_string_conversion_warning();
// ResolveClass: push the Class named by litstr `id`; if the class cannot be
// loaded, fall back to pushing the name string (optionally warning).
2164 OPTBLD_INLINE void iopResolveClass(Id id) {
2165 auto const cname = vmfp()->unit()->lookupLitstrId(id);
2166 auto const class_ = Class::load(cname);
2167 // TODO (T61651936): Disallow implicit conversion to string
2168 if (class_ == nullptr) {
2169 if (RuntimeOption::EvalRaiseClassConversionWarning) {
2170 raise_class_to_string_conversion_warning();
2172 vmStack().pushStaticString(cname);
2174 else {
2175 vmStack().pushClass(class_);
// LazyClass: push a lazy class reference for the litstr name without
// triggering class load.
2179 OPTBLD_INLINE void iopLazyClass(Id id) {
2180 auto const cname = vmfp()->unit()->lookupLitstrId(id);
2181 auto const lclass = LazyClassData::create(cname);
2182 vmStack().pushLazyClass(lclass);
// ClassGetC: replace the cell on the stack top with the Class it refers to
// (raising a notice first if it is a string class name).
2185 OPTBLD_INLINE void iopClassGetC() {
2186 auto const cell = vmStack().topC();
2187 if (isStringType(cell->m_type)) {
2188 raise_str_to_class_notice(cell->m_data.pstr);
2190 auto const cls = lookupClsRef(cell);
2191 vmStack().popC();
2192 vmStack().pushClass(cls);
// ClassGetTS: pop a type structure (dict) and push the Class it names plus
// its reified generics (or null when there are none).
2195 OPTBLD_INLINE void iopClassGetTS() {
2196 auto const cell = vmStack().topC();
2197 if (!tvIsDict(cell)) {
2198 raise_error("Reified type must be a type structure");
2200 auto const ts = cell->m_data.parr;
2201 auto const classname_field = ts->get(s_classname.get());
2202 if (!classname_field.is_init()) {
2203 raise_error("You cannot create a new instance of this type as "
2204 "it is not a class");
2206 assertx(isStringType(classname_field.type()));
2207 auto const name = classname_field.val().pstr;
// If the TS carries generic types, intern them in the reified-generics
// table under their mangled name.
2208 auto const generics_field = ts->get(s_generic_types.get());
2209 ArrayData* reified_types = nullptr;
2210 if (generics_field.is_init()) {
2211 reified_types = generics_field.val().parr;
2212 auto const mangledTypeName =
2213 makeStaticString(mangleReifiedGenericsName(reified_types));
2214 reified_types->incRefCount();
2215 reified_types = addToReifiedGenericsTable(mangledTypeName, reified_types);
2217 auto const cls = Class::load(name);
2218 if (cls == nullptr) {
2219 raise_error(Strings::UNKNOWN_CLASS, name->data());
// Pop the type structure, then push the class and the generics cell.
2222 vmStack().popC();
2223 vmStack().pushClass(cls);
2224 if (reified_types) {
2225 vmStack().pushStaticArrayLike(reified_types);
2226 } else {
2227 vmStack().pushNull();
// Throw an UndefinedVariableException for the named local `pind` in frame
// `fp`. In debug builds, first assert that HHBBC kept name info for it.
2231 static void raise_undefined_local(ActRec* fp, LocalName pind) {
2232 assertx(pind < fp->func()->numNamedLocals());
2233 assertx(fp->func()->localVarName(pind));
2234 if (debug) {
2235 auto vm = &*g_context;
2236 always_assert_flog(
2237 pind != kInvalidLocalName,
2238 "HHBBC incorrectly removed name info for a local in {}:{}",
2239 vm->getContainingFileName()->data(),
2240 vm->getLine()
2243 SystemLib::throwUndefinedVariableExceptionObject(
2244 folly::sformat("Undefined variable: {}",
2245 fp->func()->localVarName(pind)->data()));
// Copy an initialized local's value into `to` with an inc-ref (tvDup).
2248 static inline void cgetl_inner_body(tv_rval fr, TypedValue* to) {
2249 assertx(type(fr) != KindOfUninit);
2250 tvDup(*fr, *to);
// Shared implementation of the CGetL family: copy local `fr` into stack
// slot `to`; uninit locals yield null and (if `warn`) raise undefined-local.
2253 OPTBLD_INLINE void cgetl_body(ActRec* fp,
2254 tv_rval fr,
2255 TypedValue* to,
2256 LocalName lname,
2257 bool warn) {
2258 if (type(fr) == KindOfUninit) {
2259 // `to' is uninitialized here, so we need to tvWriteNull before
2260 // possibly causing stack unwinding.
2261 tvWriteNull(*to);
2262 if (warn) raise_undefined_local(fp, lname);
2263 } else {
2264 cgetl_inner_body(fr, to);
// CGetL: push a copy of the local, warning if undefined.
2268 OPTBLD_INLINE void iopCGetL(named_local_var fr) {
2269 TypedValue* to = vmStack().allocC();
2270 cgetl_body(vmfp(), fr.lval, to, fr.name, true);
// CGetQuietL: like CGetL but silent on undefined locals.
2273 OPTBLD_INLINE void iopCGetQuietL(tv_lval fr) {
2274 TypedValue* to = vmStack().allocC();
2275 cgetl_body(vmfp(), fr, to, kInvalidLocalName, false);
// CUGetL: push a copy of the local, allowing Uninit through unchecked.
2278 OPTBLD_INLINE void iopCUGetL(tv_lval fr) {
2279 auto to = vmStack().allocTV();
2280 tvDup(*fr, *to);
// CGetL2: push a copy of the local *underneath* the current stack top
// (the old top is shifted up one slot first).
2283 OPTBLD_INLINE void iopCGetL2(named_local_var fr) {
2284 TypedValue* oldTop = vmStack().topTV();
2285 TypedValue* newTop = vmStack().allocTV();
2286 memcpy(newTop, oldTop, sizeof *newTop);
2287 TypedValue* to = oldTop;
2288 cgetl_body(vmfp(), fr.lval, to, fr.name, true);
// PushL: move (not copy) the local's value to the stack, leaving the
// local Uninit; no refcount traffic.
2291 OPTBLD_INLINE void iopPushL(tv_lval locVal) {
2292 assertx(type(locVal) != KindOfUninit);
2293 TypedValue* dest = vmStack().allocTV();
2294 *dest = *locVal;
2295 type(locVal) = KindOfUninit;
// CGetG: replace the global-name cell on the stack top with the global's
// value (null if the global is absent or uninit).
2298 OPTBLD_INLINE void iopCGetG() {
2299 StringData* name;
2300 TypedValue* to = vmStack().topTV();
2301 auto const fr = lookup_gbl(vmfp(), name, to);
2302 SCOPE_EXIT { decRefStr(name); };
// The name cell is consumed in place; drop its refcount before overwriting.
2303 tvDecRefGen(to);
2304 if (!fr || type(fr) == KindOfUninit) {
2305 tvWriteNull(*to);
2306 } else {
2307 cgetl_inner_body(fr, to);
// RAII helper for static-property instructions: on construction it resolves
// the (class, prop-name) pair sitting on the eval stack; on destruction it
// pops the class cell and releases the name.
2311 struct SpropState {
2312 SpropState(Stack&, bool ignoreLateInit);
2313 ~SpropState();
2314 StringData* name;       // resolved property name (owned; released in dtor)
2315 Class* cls;             // class the property was looked up on
2316 TypedValue* output;     // stack slot where the result should be written
2317 TypedValue* val;        // the property's storage, if found
2318 TypedValue oldNameCell; // original name cell, decref'd in dtor
2319 Slot slot;
2320 bool visible;
2321 bool accessible;
2322 bool constant;
2323 bool readonly;
2324 Stack& vmstack;
// Expects stack layout: [class cell][name cell]; the name cell doubles as
// the output slot.
2327 SpropState::SpropState(Stack& vmstack, bool ignoreLateInit) : vmstack{vmstack} {
2328 auto const clsCell = vmstack.topC();
2329 auto const nameCell = output = vmstack.indTV(1);
2330 if (!isClassType(clsCell->m_type)) {
2331 raise_error("SpropState: expected class");
2333 cls = clsCell->m_data.pclass;
2334 lookup_sprop(vmfp(), cls, name, nameCell, val,
2335 slot, visible, accessible, constant, readonly, ignoreLateInit);
2336 oldNameCell = *nameCell;
2339 SpropState::~SpropState() {
2340 vmstack.discard();
2341 decRefStr(name);
2342 tvDecRefGen(oldNameCell);
// CGetS: read a static property; errors on inaccessible props and (when
// readonly enforcement is on) on mutable reads of readonly props.
2345 OPTBLD_INLINE void iopCGetS(ReadonlyOp op) {
2346 SpropState ss(vmStack(), false);
2347 if (!(ss.visible && ss.accessible)) {
2348 raise_error("Invalid static property access: %s::%s",
2349 ss.cls->name()->data(),
2350 ss.name->data());
2352 if (RO::EvalEnableReadonlyPropertyEnforcement &&
2353 ss.readonly && op == ReadonlyOp::Mutable) {
2354 throw_or_warn_must_be_enclosed_in_readonly(
2355 ss.cls->name()->data(), ss.name->data()
// Copy (inc-ref) the property value into the output stack slot.
2358 tvDup(*ss.val, *ss.output);
// Shared implementation of BaseGC/BaseGL: set the member-instruction base
// to the global named by `key`. Define mode creates the global; otherwise a
// missing global yields a null temp base (throwing first in Warn mode).
2361 static inline void baseGImpl(tv_rval key, MOpMode mode) {
2362 auto& mstate = vmMInstrState();
2363 StringData* name;
2364 mstate.roProp = false;
2366 auto const baseVal = (mode == MOpMode::Define)
2367 ? lookupd_gbl(vmfp(), name, key)
2368 : lookup_gbl(vmfp(), name, key);
2369 SCOPE_EXIT { decRefStr(name); };
2371 if (!baseVal) {
2372 assertx(mode != MOpMode::Define);
2373 if (mode == MOpMode::Warn) {
2374 SystemLib::throwOutOfBoundsExceptionObject(
2375 folly::sformat("Undefined index: {}", name)
// Absent global: base becomes a null scratch cell.
2378 tvWriteNull(mstate.tvTempBase);
2379 mstate.base = &mstate.tvTempBase;
2380 return;
2383 mstate.base = baseVal;
// BaseGC: global name comes from a stack cell at depth `idx`.
2386 OPTBLD_INLINE void iopBaseGC(uint32_t idx, MOpMode mode) {
2387 baseGImpl(vmStack().indTV(idx), mode);
// BaseGL: global name comes from a local.
2390 OPTBLD_INLINE void iopBaseGL(tv_lval loc, MOpMode mode) {
2391 baseGImpl(loc, mode);
// BaseSC: set the member-instruction base to a static property, given a
// class cell and a property-name cell on the stack. Enforces accessibility,
// const-ness (for write modes) and readonly rules.
2394 OPTBLD_INLINE void iopBaseSC(uint32_t keyIdx,
2395 uint32_t clsIdx,
2396 MOpMode mode,
2397 ReadonlyOp op) {
2398 auto& mstate = vmMInstrState();
2399 auto const clsCell = vmStack().indC(clsIdx);
2400 auto const key = vmStack().indTV(keyIdx);
2402 if (!isClassType(clsCell->m_type)) {
2403 raise_error("Attempting to obtain static base on non-class");
2405 auto const class_ = clsCell->m_data.pclass;
2407 auto const name = lookup_name(key);
2408 SCOPE_EXIT { decRefStr(name); };
2409 auto const lookup = class_->getSProp(arGetContextClass(vmfp()), name);
2410 if (!lookup.val || !lookup.accessible) {
2411 raise_error("Invalid static property access: %s::%s",
2412 class_->name()->data(),
2413 name->data());
2415 assertx(mode != MOpMode::InOut);
2416 auto const writeMode = mode == MOpMode::Define || mode == MOpMode::Unset;
// Writing through a static const property is never allowed.
2418 if (lookup.constant && writeMode) {
2419 throw_cannot_modify_static_const_prop(class_->name()->data(),
2420 name->data());
2423 mstate.roProp = false;
2424 checkReadonly(lookup.val, class_, name, lookup.readonly, op, writeMode);
2425 mstate.base = tv_lval(lookup.val);
// Shared implementation of BaseL: set the member-instruction base to a
// frame local, raising for uninit locals in Warn mode and enforcing
// readonly-local rules.
2428 OPTBLD_INLINE void baseLImpl(named_local_var loc, MOpMode mode, ReadonlyOp op) {
2429 auto& mstate = vmMInstrState();
2430 auto const local = loc.lval;
2431 if (mode == MOpMode::Warn && type(local) == KindOfUninit) {
2432 raise_undefined_local(vmfp(), loc.name);
2435 mstate.roProp = false;
2436 if (readonlyLocalShouldThrow(*local, op)) {
2437 assertx(loc.name < vmfp()->func()->numNamedLocals());
2438 assertx(vmfp()->func()->localVarName(loc.name));
2439 auto const name = vmfp()->func()->localVarName(loc.name);
2440 throw_or_warn_local_must_be_value_type(name->data());
2442 mstate.base = local;
// BaseL: base is a local variable.
2445 OPTBLD_INLINE void iopBaseL(named_local_var loc, MOpMode mode, ReadonlyOp op) {
2446 baseLImpl(loc, mode, op);
// BaseC: base is a stack cell at depth `idx`.
2449 OPTBLD_INLINE void iopBaseC(uint32_t idx, MOpMode) {
2450 auto& mstate = vmMInstrState();
2451 mstate.base = vmStack().indC(idx);
2452 mstate.roProp = false;
// BaseH: base is $this of the current frame, staged in the scratch cell.
2455 OPTBLD_INLINE void iopBaseH() {
2456 auto& mstate = vmMInstrState();
2457 mstate.tvTempBase = make_tv<KindOfObject>(vmfp()->getThis());
2458 mstate.base = &mstate.tvTempBase;
2459 mstate.roProp = false;
// Advance the member-instruction base through a property access, selecting
// the Prop<> instantiation that matches the MOpMode.
2462 static OPTBLD_INLINE void propDispatch(MOpMode mode, TypedValue key, ReadonlyOp op) {
2463 auto& mstate = vmMInstrState();
2464 auto ctx = arGetContextClass(vmfp());
2466 mstate.base = [&]{
2467 switch (mode) {
2468 case MOpMode::None:
2469 return Prop<MOpMode::None>(mstate.tvTempBase, ctx, mstate.base, key, op);
2470 case MOpMode::Warn:
2471 return Prop<MOpMode::Warn>(mstate.tvTempBase, ctx, mstate.base, key, op);
2472 case MOpMode::Define:
2473 return Prop<MOpMode::Define,KeyType::Any>(
2474 mstate.tvTempBase, ctx, mstate.base, key, op
2476 case MOpMode::Unset:
2477 return Prop<MOpMode::Unset>(mstate.tvTempBase, ctx, mstate.base, key, op);
2478 case MOpMode::InOut:
2479 always_assert_flog(false, "MOpMode::InOut can only occur on Elem");
2481 always_assert(false);
2482 }();
// Advance the base through a null-safe property access (?-> / MQT member
// key); only None and Warn modes are legal here.
2485 static OPTBLD_INLINE void propQDispatch(MOpMode mode, TypedValue key, ReadonlyOp op) {
2486 auto& mstate = vmMInstrState();
2487 auto ctx = arGetContextClass(vmfp());
2489 assertx(mode == MOpMode::None || mode == MOpMode::Warn);
2490 assertx(key.m_type == KindOfPersistentString);
2491 if (mode == MOpMode::None) {
2492 mstate.base = nullSafeProp<MOpMode::None>(mstate.tvTempBase, ctx,
2493 mstate.base, key.m_data.pstr, op);
2494 } else {
2495 mstate.base = nullSafeProp<MOpMode::Warn>(mstate.tvTempBase, ctx,
2496 mstate.base, key.m_data.pstr, op);
// Advance the member-instruction base through an element access. Read modes
// produce a value that is staged in the scratch cell; Define/Unset return an
// lval and are checked against readonly-collection rules.
2500 static OPTBLD_INLINE
2501 void elemDispatch(MOpMode mode, TypedValue key) {
2502 auto& mstate = vmMInstrState();
2503 auto const b = mstate.base;
// Read results come back by value; park them in tvTempBase so the next dim
// has an lval to work from.
2505 auto const baseValueToLval = [&](TypedValue base) {
2506 mstate.tvTempBase = base;
2507 return tv_lval { &mstate.tvTempBase };
// Mutating an object element reached through a readonly property is
// disallowed when enforcement is enabled.
2510 auto const checkDimForReadonly = [&](DataType dt) {
2511 if (!RO::EvalEnableReadonlyPropertyEnforcement) return;
2512 if (mstate.roProp && dt == KindOfObject) {
2513 throw_or_warn_cannot_modify_readonly_collection();
2517 mstate.base = [&]{
2518 switch (mode) {
2519 case MOpMode::None:
2520 return baseValueToLval(Elem<MOpMode::None>(b, key));
2521 case MOpMode::Warn:
2522 return baseValueToLval(Elem<MOpMode::Warn>(b, key));
2523 case MOpMode::InOut:
2524 return baseValueToLval(Elem<MOpMode::InOut>(b, key));
2525 case MOpMode::Define: {
2526 auto const result = ElemD(b, key);
2527 checkDimForReadonly(result.type());
2528 return result;
2530 case MOpMode::Unset: {
2531 auto const result = ElemU(b, key);
2532 checkDimForReadonly(result.type());
2533 return result;
2536 always_assert(false);
2537 }();
// Materialize a MemberKey into a TypedValue key: locals and stack cells are
// read (class cells coerced to strings), immediates are wrapped directly.
// MW (new-element) has no key and yields an empty TypedValue.
2540 static inline TypedValue key_tv(MemberKey key) {
2541 switch (key.mcode) {
2542 case MW:
2543 return TypedValue{};
2544 case MEL: case MPL: {
2545 auto const local = frame_local(vmfp(), key.local.id);
2546 if (type(local) == KindOfUninit) {
2547 raise_undefined_local(vmfp(), key.local.name);
2548 return make_tv<KindOfNull>();
2550 return tvClassToString(*local);
2552 case MEC: case MPC:
2553 return tvClassToString(*vmStack().indTV(key.iva));
2554 case MEI:
2555 return make_tv<KindOfInt64>(key.int64);
2556 case MET: case MPT: case MQT:
2557 return make_tv<KindOfPersistentString>(key.litstr);
2559 not_reached();
// Route one intermediate member operation to the right dispatcher based on
// the member-key code: null-safe prop, prop, elem, or new-elem (MW).
2562 static OPTBLD_INLINE void dimDispatch(MOpMode mode, MemberKey mk) {
2563 auto const key = key_tv(mk);
2564 if (mk.mcode == MQT) {
2565 propQDispatch(mode, key, mk.rop);
2566 } else if (mcodeIsProp(mk.mcode)) {
2567 propDispatch(mode, key, mk.rop);
2568 } else if (mcodeIsElem(mk.mcode)) {
2569 elemDispatch(mode, key);
2570 } else {
// MW: appending ([]) is only meaningful for writes.
2571 if (mode == MOpMode::Warn) raise_error("Cannot use [] for reading");
2572 auto& mstate = vmMInstrState();
2573 mstate.base = NewElem(mstate.base);
// Dim: a single intermediate member operation.
2577 OPTBLD_INLINE void iopDim(MOpMode mode, MemberKey mk) {
2578 dimDispatch(mode, mk);
// Finish a member instruction: pop `nDiscard` consumed cells and, if the
// operation produced a value, push it.
2581 static OPTBLD_INLINE void mFinal(MInstrState& mstate,
2582 int32_t nDiscard,
2583 Optional<TypedValue> result) {
2584 auto& stack = vmStack();
2585 for (auto i = 0; i < nDiscard; ++i) stack.popTV();
2586 if (result) tvCopy(*result, *stack.allocTV());
// Shared implementation of QueryM: perform the final dim and produce either
// the element/prop value (CGet variants) or an isset boolean.
2589 static OPTBLD_INLINE
2590 void queryMImpl(MemberKey mk, int32_t nDiscard, QueryMOp op) {
2591 auto& mstate = vmMInstrState();
2592 TypedValue result;
2593 switch (op) {
2594 case QueryMOp::InOut:
2595 always_assert_flog(
2596 mcodeIsElem(mk.mcode), "QueryM InOut is only compatible with Elem"
2598 // fallthrough
2599 case QueryMOp::CGet:
2600 case QueryMOp::CGetQuiet:
2601 dimDispatch(getQueryMOpMode(op), mk);
2602 tvDup(*mstate.base, result);
2603 break;
2605 case QueryMOp::Isset:
2606 result.m_type = KindOfBoolean;
2607 auto const key = key_tv(mk);
2608 if (mcodeIsProp(mk.mcode)) {
2609 auto const ctx = arGetContextClass(vmfp());
2610 result.m_data.num = IssetProp(ctx, mstate.base, key);
2611 } else {
2612 assertx(mcodeIsElem(mk.mcode));
2613 result.m_data.num = IssetElem(mstate.base, key);
2615 break;
2617 mFinal(mstate, nDiscard, result);
// QueryM: final read operation of a member-instruction sequence.
2620 OPTBLD_INLINE void iopQueryM(uint32_t nDiscard, QueryMOp subop, MemberKey mk) {
2621 queryMImpl(mk, nDiscard, subop);
// SetM: final set of a member-instruction sequence. The RHS is the stack
// top; the assigned value is pushed back as the instruction's result.
2624 OPTBLD_INLINE void iopSetM(uint32_t nDiscard, MemberKey mk) {
2625 auto& mstate = vmMInstrState();
2626 auto const topC = vmStack().topC();
2628 if (mk.mcode == MW) {
2629 SetNewElem<true>(mstate.base, topC);
2630 } else {
2631 auto const key = key_tv(mk);
2632 if (mcodeIsElem(mk.mcode)) {
2633 auto const result = SetElem<true>(mstate.base, key, topC);
// A non-null result means string-base assignment: the result cell becomes
// the new one-character string value.
2634 if (result) {
2635 tvDecRefGen(topC);
2636 topC->m_type = KindOfString;
2637 topC->m_data.pstr = result;
2639 } else {
2640 auto const ctx = arGetContextClass(vmfp());
2641 SetProp<true>(ctx, mstate.base, key, topC, mk.rop);
2645 auto const result = *topC;
2646 vmStack().discard();
2647 mFinal(mstate, nDiscard, result);
// SetRangeM: bulk write of `count` elements of width `size` into the base
// at `offset`, forward or in reverse. Produces no value.
2650 OPTBLD_INLINE void iopSetRangeM( uint32_t nDiscard, uint32_t size, SetRangeOp op) {
2651 auto& mstate = vmMInstrState();
2652 auto const count = tvCastToInt64(*vmStack().indC(0));
2653 auto const src = *vmStack().indC(1);
2654 auto const offset = tvCastToInt64(*vmStack().indC(2));
2656 if (op == SetRangeOp::Forward) {
2657 SetRange<false>(mstate.base, offset, src, count, size);
2658 } else {
2659 SetRange<true>(mstate.base, offset, src, count, size);
// +3 accounts for the count/src/offset operands consumed above.
2662 mFinal(mstate, nDiscard + 3, std::nullopt);
// IncDecM: final increment/decrement of a prop, elem, or new elem; pushes
// the operation's result value.
2665 OPTBLD_INLINE void iopIncDecM(uint32_t nDiscard, IncDecOp subop, MemberKey mk) {
2666 auto const key = key_tv(mk);
2668 auto& mstate = vmMInstrState();
2669 auto const result = [&]{
2670 if (mcodeIsProp(mk.mcode)) {
2671 return IncDecProp(arGetContextClass(vmfp()), subop, mstate.base, key);
2672 } else if (mcodeIsElem(mk.mcode)) {
2673 return IncDecElem(subop, mstate.base, key);
2674 } else {
2675 return IncDecNewElem(subop, mstate.base);
2677 }();
2679 mFinal(mstate, nDiscard, result);
// SetOpM: final compound assignment (+=, .=, ...) with the RHS on the stack
// top; pushes the resulting value (inc-ref'd, since the helpers return a
// reference to the stored cell).
2682 OPTBLD_INLINE void iopSetOpM(uint32_t nDiscard, SetOpOp subop, MemberKey mk) {
2683 auto const key = key_tv(mk);
2684 auto const rhs = vmStack().topC();
2686 auto& mstate = vmMInstrState();
2687 auto const result = [&]{
2688 if (mcodeIsProp(mk.mcode)) {
2689 return *SetOpProp(mstate.tvTempBase, arGetContextClass(vmfp()),
2690 subop, mstate.base, key, rhs);
2691 } else if (mcodeIsElem(mk.mcode)) {
2692 return SetOpElem(subop, mstate.base, key, rhs);
2693 } else {
2694 return SetOpNewElem(subop, mstate.base, rhs);
2696 }();
2698 vmStack().popC();
2699 tvIncRefGen(result);
2700 mFinal(mstate, nDiscard, result);
// UnsetM: final unset of a prop or elem; produces no value.
2703 OPTBLD_INLINE void iopUnsetM(uint32_t nDiscard, MemberKey mk) {
2704 auto const key = key_tv(mk);
2706 auto& mstate = vmMInstrState();
2707 if (mcodeIsProp(mk.mcode)) {
2708 UnsetProp(arGetContextClass(vmfp()), mstate.base, key);
2709 } else {
2710 assertx(mcodeIsElem(mk.mcode));
2711 UnsetElem(mstate.base, key);
2714 mFinal(mstate, nDiscard, std::nullopt);
2717 namespace {
// Raise a fatal if the current frame has no usable $this (not a method, or
// called statically).
2719 inline void checkThis(ActRec* fp) {
2720 if (!fp->func()->cls() || !fp->hasThis()) {
2721 raise_error(Strings::FATAL_NULL_THIS);
// Look up a memoized result for the current memoize-wrapper frame, keyed by
// the locals in `keys`. Returns nullptr on cache miss. Handles all cache
// layouts: static/LSB function caches, per-object memo slots (direct value
// or cache), shared slots, and the generic fallback keyed by FuncId.
2725 OPTBLD_INLINE const TypedValue* memoGetImpl(LocalRange keys) {
2726 auto const fp = vmfp();
2727 auto const func = fp->func();
2728 assertx(func->isMemoizeWrapper());
2729 assertx(keys.first + keys.count <= func->numLocals());
// Memo keys must already be normalized to ints or strings.
2731 for (auto i = 0; i < keys.count; ++i) {
2732 auto const key = frame_local(fp, keys.first + i);
2733 if (!isIntType(type(key)) && !isStringType(type(key))) {
2734 raise_error("Memoization keys can only be ints or strings");
2738 auto const c = [&] () -> const TypedValue* {
// Free functions and static methods use RDS-bound caches (LSB-aware when
// the wrapper is LSB).
2739 if (!func->isMethod() || func->isStatic()) {
2740 auto const lsbCls =
2741 func->isMemoizeWrapperLSB() ? fp->getClass() : nullptr;
2742 if (keys.count > 0) {
2743 auto cache =
2744 lsbCls ? rds::bindLSBMemoCache(lsbCls, func)
2745 : rds::bindStaticMemoCache(func);
2746 if (!cache.isInit()) return nullptr;
// NOTE(review): keysBegin points at the *last* local in the range; the
// getters appear to walk keys in reverse — confirm against memo-cache impl.
2747 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2748 if (auto getter = memoCacheGetForKeyCount(keys.count)) {
2749 return getter(*cache, keysBegin);
2751 return memoCacheGetGeneric(
2752 *cache,
2753 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2754 keysBegin
// Zero keys: the cache is a single memoized value.
2758 auto cache =
2759 lsbCls ? rds::bindLSBMemoValue(lsbCls, func)
2760 : rds::bindStaticMemoValue(func);
2761 return cache.isInit() ? cache.get() : nullptr;
// Instance methods store results in per-object memo slots.
2764 checkThis(fp);
2765 auto const this_ = fp->getThis();
2766 auto const cls = func->cls();
2767 assertx(this_->instanceof(cls));
2768 assertx(cls->hasMemoSlots());
2770 auto const memoInfo = cls->memoSlotForFunc(func->getFuncId());
2772 auto const slot = UNLIKELY(this_->hasNativeData())
2773 ? this_->memoSlotNativeData(memoInfo.first, cls->getNativeDataInfo()->sz)
2774 : this_->memoSlot(memoInfo.first);
// Non-shared slot with no keys holds the value directly.
2776 if (keys.count == 0 && !memoInfo.second) {
2777 auto const val = slot->getValue();
2778 return val->m_type != KindOfUninit ? val : nullptr;
2781 auto const cache = slot->getCache();
2782 if (!cache) return nullptr;
// Shared slots are keyed by FuncId (several funcs share one slot).
2784 if (memoInfo.second) {
2785 if (keys.count == 0) {
2786 return memoCacheGetSharedOnly(
2787 cache,
2788 makeSharedOnlyKey(func->getFuncId())
2791 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2792 if (auto const getter = sharedMemoCacheGetForKeyCount(keys.count)) {
2793 return getter(cache, func->getFuncId(), keysBegin);
2795 return memoCacheGetGeneric(
2796 cache,
2797 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2798 keysBegin
2802 assertx(keys.count > 0);
2803 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2804 if (auto const getter = memoCacheGetForKeyCount(keys.count)) {
2805 return getter(cache, keysBegin);
2807 return memoCacheGetGeneric(
2808 cache,
2809 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2810 keysBegin
2812 }();
2814 assertx(!c || tvIsPlausible(*c));
2815 assertx(!c || c->m_type != KindOfUninit);
2816 return c;
// MemoGet: push the cached value if present, else branch to `notfound`.
2821 OPTBLD_INLINE void iopMemoGet(PC& pc, PC notfound, LocalRange keys) {
2822 if (auto const c = memoGetImpl(keys)) {
2823 tvDup(*c, *vmStack().allocC());
2824 } else {
2825 pc = notfound;
// MemoGetEager: async-function variant with an extra `suspended` target for
// cached values that are wait handles (eager-return flag clear in the aux).
2829 OPTBLD_INLINE void iopMemoGetEager(PC& pc,
2830 PC notfound,
2831 PC suspended,
2832 LocalRange keys) {
2833 assertx(vmfp()->func()->isAsyncFunction());
2834 assertx(!isResumed(vmfp()));
2836 if (auto const c = memoGetImpl(keys)) {
2837 tvDup(*c, *vmStack().allocC());
2838 if (!c->m_aux.u_asyncEagerReturnFlag) {
2839 assertx(tvIsObject(c) && c->m_data.pobj->isWaitHandle());
2840 pc = suspended;
2842 } else {
2843 pc = notfound;
2847 namespace {
// Store `val` into the memo cache for the current memoize-wrapper frame,
// keyed by the locals in `keys`. Mirrors the cache-layout dispatch in
// memoGetImpl (static/LSB caches, per-object slots, shared slots, generic).
2849 OPTBLD_INLINE void memoSetImpl(LocalRange keys, TypedValue val) {
2850 auto const fp = vmfp();
2851 auto const func = fp->func();
2852 assertx(func->isMemoizeWrapper());
2853 assertx(keys.first + keys.count <= func->numLocals());
2854 assertx(tvIsPlausible(val));
// Memo keys must already be normalized to ints or strings.
2856 for (auto i = 0; i < keys.count; ++i) {
2857 auto const key = frame_local(fp, keys.first + i);
2858 if (!isIntType(type(key)) && !isStringType(type(key))) {
2859 raise_error("Memoization keys can only be ints or strings");
// Free functions and static methods: RDS-bound caches (LSB-aware).
2863 if (!func->isMethod() || func->isStatic()) {
2864 auto const lsbCls = func->isMemoizeWrapperLSB() ? fp->getClass() : nullptr;
2865 if (keys.count > 0) {
2866 auto cache =
2867 lsbCls ? rds::bindLSBMemoCache(lsbCls, func)
2868 : rds::bindStaticMemoCache(func);
2869 if (!cache.isInit()) cache.initWith(nullptr);
2870 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2871 if (auto setter = memoCacheSetForKeyCount(keys.count)) {
2872 return setter(*cache, keysBegin, val);
2874 return memoCacheSetGeneric(
2875 *cache,
2876 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2877 keysBegin,
// Zero keys: single-value cache; lazily initialize then overwrite.
2882 auto cache =
2883 lsbCls ? rds::bindLSBMemoValue(lsbCls, func)
2884 : rds::bindStaticMemoValue(func);
2885 if (!cache.isInit()) {
2886 tvWriteUninit(*cache);
2887 cache.markInit();
// tvSetWithAux preserves the aux bits (the async eager-return flag).
2890 tvSetWithAux(val, *cache);
2891 return;
// Instance methods: per-object memo slots.
2894 checkThis(fp);
2895 auto const this_ = fp->getThis();
2896 auto const cls = func->cls();
2897 assertx(this_->instanceof(cls));
2898 assertx(cls->hasMemoSlots());
2900 this_->setAttribute(ObjectData::UsedMemoCache);
2902 auto const memoInfo = cls->memoSlotForFunc(func->getFuncId());
2904 auto slot = UNLIKELY(this_->hasNativeData())
2905 ? this_->memoSlotNativeData(memoInfo.first, cls->getNativeDataInfo()->sz)
2906 : this_->memoSlot(memoInfo.first);
// Non-shared slot with no keys stores the value directly in the slot.
2908 if (keys.count == 0 && !memoInfo.second) {
2909 tvSetWithAux(val, *slot->getValue());
2910 return;
2913 auto& cache = slot->getCacheForWrite();
// Shared slots are keyed by FuncId (several funcs share one slot).
2915 if (memoInfo.second) {
2916 if (keys.count == 0) {
2917 return memoCacheSetSharedOnly(
2918 cache,
2919 makeSharedOnlyKey(func->getFuncId()),
2923 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2924 if (auto const setter = sharedMemoCacheSetForKeyCount(keys.count)) {
2925 return setter(cache, func->getFuncId(), keysBegin, val);
2927 return memoCacheSetGeneric(
2928 cache,
2929 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2930 keysBegin,
2935 assertx(keys.count > 0);
2936 auto const keysBegin = frame_local(fp, keys.first + keys.count - 1);
2937 if (auto const setter = memoCacheSetForKeyCount(keys.count)) {
2938 return setter(cache, keysBegin, val);
2940 return memoCacheSetGeneric(
2941 cache,
2942 GenericMemoId{func->getFuncId(), keys.count}.asParam(),
2943 keysBegin,
// MemoSet: cache the value on the stack top. For async functions the value
// is a wait handle; clear the eager-return aux flag before storing.
2950 OPTBLD_INLINE void iopMemoSet(LocalRange keys) {
2951 auto val = *vmStack().topC();
2952 assertx(val.m_type != KindOfUninit);
2953 if (vmfp()->func()->isAsyncFunction()) {
2954 assertx(tvIsObject(val) && val.m_data.pobj->isWaitHandle());
2955 val.m_aux.u_asyncEagerReturnFlag = 0;
2957 memoSetImpl(keys, val);
// MemoSetEager: cache an eagerly-returned async value; mark the aux flag so
// MemoGetEager knows it is a plain value, not a wait handle.
2960 OPTBLD_INLINE void iopMemoSetEager(LocalRange keys) {
2961 assertx(vmfp()->func()->isAsyncFunction());
2962 assertx(!isResumed(vmfp()));
2963 auto val = *vmStack().topC();
2964 assertx(val.m_type != KindOfUninit);
2965 val.m_aux.u_asyncEagerReturnFlag = static_cast<uint32_t>(-1);
2966 memoSetImpl(keys, val);
// IssetG: replace the global-name cell with whether the global exists and
// is non-null.
2969 OPTBLD_INLINE void iopIssetG() {
2970 StringData* name;
2971 TypedValue* tv1 = vmStack().topTV();
2972 auto const lval = lookup_gbl(vmfp(), name, tv1);
2973 SCOPE_EXIT { decRefStr(name); };
2974 auto const e = lval && !tvIsNull(lval);
2975 vmStack().replaceC<KindOfBoolean>(e);
// IssetS: isset on a static property; false when invisible/inaccessible.
2978 OPTBLD_INLINE void iopIssetS() {
2979 SpropState ss(vmStack(), true);
2980 bool e;
2981 if (!(ss.visible && ss.accessible)) {
2982 e = false;
2983 } else {
2984 e = !tvIsNull(ss.val);
2986 ss.output->m_data.num = e;
2987 ss.output->m_type = KindOfBoolean;
// IssetL: push whether the local is set (non-null).
2990 OPTBLD_INLINE void iopIssetL(tv_lval val) {
2991 bool ret = !is_null(val);
2992 TypedValue* topTv = vmStack().allocTV();
2993 topTv->m_data.num = ret;
2994 topTv->m_type = KindOfBoolean;
// IsUnsetL: push whether the local is Uninit.
2997 OPTBLD_INLINE void iopIsUnsetL(tv_lval val) {
2998 bool ret = type(val) == KindOfUninit;
2999 TypedValue* topTv = vmStack().allocTV();
3000 topTv->m_data.num = ret;
3001 topTv->m_type = KindOfBoolean;
// Evaluate an IsTypeOp predicate against a value; shared by IsTypeL/IsTypeC.
3004 OPTBLD_INLINE static bool isTypeHelper(TypedValue val, IsTypeOp op) {
3005 assertx(tvIsPlausible(val));
3007 switch (op) {
3008 case IsTypeOp::Null: return is_null(&val);
3009 case IsTypeOp::Bool: return is_bool(&val);
3010 case IsTypeOp::Int: return is_int(&val);
3011 case IsTypeOp::Dbl: return is_double(&val);
3012 case IsTypeOp::Vec: return is_vec(&val);
3013 case IsTypeOp::Dict: return is_dict(&val);
3014 case IsTypeOp::Keyset: return is_keyset(&val);
3015 case IsTypeOp::Obj: return is_object(&val);
3016 case IsTypeOp::Str: return is_string(&val);
3017 case IsTypeOp::Res: return tvIsResource(val);
3018 case IsTypeOp::Scalar: return HHVM_FN(is_scalar)(tvAsCVarRef(val));
3019 case IsTypeOp::ArrLike: return is_any_array(&val);
3020 case IsTypeOp::LegacyArrLike: {
3021 return HHVM_FN(is_array_marked_legacy)(tvAsCVarRef(val));
3023 case IsTypeOp::ClsMeth: return is_clsmeth(&val);
3024 case IsTypeOp::Func: return is_fun(&val);
3025 case IsTypeOp::Class: return is_class(&val);
3027 not_reached();
// IsTypeL: type-test a local (raising on uninit) and push the boolean.
3030 OPTBLD_INLINE void iopIsTypeL(named_local_var loc, IsTypeOp op) {
3031 if (type(loc.lval) == KindOfUninit) {
3032 raise_undefined_local(vmfp(), loc.name);
3034 vmStack().pushBool(isTypeHelper(*loc.lval, op));
// IsTypeC: type-test the stack top in place.
3037 OPTBLD_INLINE void iopIsTypeC(IsTypeOp op) {
3038 auto val = vmStack().topC();
3039 vmStack().replaceC(make_tv<KindOfBoolean>(isTypeHelper(*val, op)));
// AssertRATL: debug-only check that a local matches its repo-auth type;
// a no-op in non-debug builds.
3042 OPTBLD_INLINE void iopAssertRATL(local_var loc, RepoAuthType rat) {
3043 if (debug) {
3044 auto const val = *loc.lval;
3045 auto const func = vmfp()->func();
3046 auto vm = &*g_context;
3047 always_assert_flog(
3048 tvMatchesRepoAuthType(val, rat),
3049 "failed assert RATL on local slot {}: maybe ${} in {}:{}, expected {},"
3050 " got {}",
3051 loc.index,
3052 loc.index < func->numNamedLocals() && func->localNames()[loc.index]
3053 ? func->localNames()[loc.index]->data()
3054 : "<unnamed/unknown>",
3055 vm->getContainingFileName()->data(),
3056 vm->getLine(),
3057 show(rat),
3058 toStringElm(val)
// AssertRATStk: same check for a stack slot at depth `stkSlot`.
3063 OPTBLD_INLINE void iopAssertRATStk(uint32_t stkSlot, RepoAuthType rat) {
3064 if (debug) {
3065 auto const tv = *vmStack().indTV(stkSlot);
3066 auto vm = &*g_context;
3067 always_assert_flog(
3068 tvMatchesRepoAuthType(tv, rat),
3069 "failed assert RATStk {} in {}:{}, expected {}, got {}",
3070 stkSlot,
3071 vm->getContainingFileName()->data(),
3072 vm->getLine(),
3073 show(rat),
3074 toStringElm(tv)
// BreakTraceHint: JIT hint only; nothing to do in the interpreter.
3079 OPTBLD_INLINE void iopBreakTraceHint() {
// AKExists: pop [container][key] and push array_key_exists(key, container).
3082 OPTBLD_INLINE void iopAKExists() {
3083 TypedValue* arr = vmStack().topTV();
// Key sits one slot below the container; class cells coerce to strings.
3084 auto key = tvClassToString(*(arr + 1));
3085 bool result = HHVM_FN(array_key_exists)(tvAsCVarRef(key), tvAsCVarRef(arr));
3086 vmStack().popTV();
3087 vmStack().replaceTV<KindOfBoolean>(result);
// GetMemoKeyL: push the memoization key for a local, computed via
// serialize_memoize_param. Uninit locals are nulled then raised on.
3090 OPTBLD_INLINE void iopGetMemoKeyL(named_local_var loc) {
3091 DEBUG_ONLY auto const func = vmfp()->func();
3092 assertx(func->isMemoizeWrapper());
3093 assertx(tvIsPlausible(*loc.lval));
3095 if (UNLIKELY(type(loc.lval) == KindOfUninit)) {
3096 tvWriteNull(loc.lval);
3097 raise_undefined_local(vmfp(), loc.name);
3100 // Use the generic scheme, which is performed by
3101 // serialize_memoize_param.
3102 auto const key = HHVM_FN(serialize_memoize_param)(*loc.lval);
3103 tvCopy(key, *vmStack().allocC());
// Idx: pop [container][key][default] and push idx(container, key, default).
// Handles array-likes, collections, strings, and falls back to the default
// for everything else (or a null key).
3106 OPTBLD_INLINE void iopIdx() {
3107 TypedValue* def = vmStack().topTV();
3108 auto const key = tvClassToString(*vmStack().indTV(1));
3109 TypedValue* arr = vmStack().indTV(2);
// Null key: the result is the default, unconditionally.
3111 if (isNullType(key.m_type)) {
3112 tvDecRefGen(arr);
3113 *arr = *def;
3114 vmStack().ndiscard(2);
3115 return;
3118 TypedValue result;
3119 if (isArrayLikeType(arr->m_type)) {
3120 result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
3121 tvAsCVarRef(&key),
3122 tvAsCVarRef(def));
3123 vmStack().popTV();
3124 } else if (arr->m_type == KindOfObject) {
3125 auto obj = arr->m_data.pobj;
3126 if (obj->isCollection() && collections::contains(obj, tvAsCVarRef(&key))) {
3127 result = collections::at(obj, &key).tv();
3128 tvIncRefGen(result);
3129 vmStack().popTV();
3130 } else {
// Default taken by move: discard (not pop) so its refcount transfers.
3131 result = *def;
3132 vmStack().discard();
3134 } else if (isStringType(arr->m_type)) {
3135 // This replicates the behavior of the hack implementation of idx, which
3136 // first checks isset($arr[$idx]), then returns $arr[(int)$idx]
3137 auto str = arr->m_data.pstr;
3138 if (IssetElemString<KeyType::Any>(str, key)) {
3139 auto idx = tvCastToInt64(key);
3140 assertx(idx >= 0 && idx < str->size());
3141 result = make_tv<KindOfPersistentString>(str->getChar(idx));
3142 vmStack().popTV();
3143 } else {
3144 result = *def;
3145 vmStack().discard();
3147 } else {
3148 result = *def;
3149 vmStack().discard();
// Pop the key, then overwrite the container slot with the result.
3151 vmStack().popTV();
3152 tvDecRefGen(arr);
3153 *arr = result;
// ArrayIdx: like Idx but restricted to array-like containers; a ClsMeth
// base is first materialized as a vec.
3156 OPTBLD_INLINE void iopArrayIdx() {
3157 TypedValue* def = vmStack().topTV();
3158 auto const key = tvClassToString(*vmStack().indTV(1));
3159 TypedValue* arr = vmStack().indTV(2);
3160 if (isClsMethType(type(arr))) {
3161 tvCastToVecInPlace(arr);
3163 auto const result = HHVM_FN(hphp_array_idx)(tvAsCVarRef(arr),
3164 tvAsCVarRef(&key),
3165 tvAsCVarRef(def));
// Pop default and key, then replace the container slot with the result.
3166 vmStack().popTV();
3167 vmStack().popTV();
3168 tvDecRefGen(arr);
3169 *arr = result;
3172 namespace {
// Shared body of ArrayMarkLegacy/ArrayUnmarkLegacy. Stack (top first):
// [$recursive (bool), input array]. Pops $recursive and replaces the input
// with a copy whose legacy bit is set/cleared, recursively if requested.
void implArrayMarkLegacy(bool legacy) {
  auto const recursive = *vmStack().topTV();
  if (!tvIsBool(recursive)) {
    SystemLib::throwInvalidArgumentExceptionObject(
      folly::sformat("$recursive must be a bool; got {}",
                     getDataTypeString(type(recursive))));

  auto const input = vmStack().indTV(1);
  auto const output = val(recursive).num
    ? arrprov::markTvRecursively(*input, legacy)
    : arrprov::markTvShallow(*input, legacy);

  vmStack().popTV();
  tvMove(output, input);
// Set the legacy bit on the array at the top of the stack.
OPTBLD_INLINE void iopArrayMarkLegacy() {
  implArrayMarkLegacy(true);
// Clear the legacy bit on the array at the top of the stack.
OPTBLD_INLINE void iopArrayUnmarkLegacy() {
  implArrayMarkLegacy(false);
3199 OPTBLD_INLINE void iopSetL(tv_lval to) {
3200 TypedValue* fr = vmStack().topC();
3201 tvSet(*fr, to);
// Implements SetG. Stack (top first): [value, name]. Assigns value to the
// global named by the name cell, then replaces the name cell with the value
// (transferring the value's reference via memcpy+discard).
OPTBLD_INLINE void iopSetG() {
  StringData* name;
  TypedValue* fr = vmStack().topC();
  TypedValue* tv2 = vmStack().indTV(1);
  // lookupd_gbl materializes the global if absent and fills `name`.
  auto const to = lookupd_gbl(vmfp(), name, tv2);
  SCOPE_EXIT { decRefStr(name); };
  assertx(to);
  tvSet(*fr, to);
  memcpy((void*)tv2, (void*)fr, sizeof(TypedValue));
  vmStack().discard();
// Implements SetS. Stack (top first): [value, class, propname]. Assigns the
// value to cls::$propname after visibility/constness/readonly and type-hint
// checks, then leaves the value as the result (in the propname slot).
OPTBLD_INLINE void iopSetS(ReadonlyOp op) {
  TypedValue* tv1 = vmStack().topTV();
  TypedValue* clsCell = vmStack().indC(1);
  TypedValue* propn = vmStack().indTV(2);
  TypedValue* output = propn;
  StringData* name;
  TypedValue* val;
  bool visible, accessible, readonly, constant;
  Slot slot;

  if (!isClassType(clsCell->m_type)) {
    raise_error("Attempting static property access on non class");
  auto const cls = clsCell->m_data.pclass;

  // Fills name/val/slot and the visibility flags for cls::$propn.
  lookup_sprop(vmfp(), cls, name, propn, val, slot, visible,
               accessible, constant, readonly, true);

  SCOPE_EXIT { decRefStr(name); };

  if (RO::EvalEnableReadonlyPropertyEnforcement && !readonly &&
      op == ReadonlyOp::Readonly) {
    throw_or_warn_must_be_readonly(cls->name()->data(), name->data());

  if (!(visible && accessible)) {
    raise_error("Invalid static property access: %s::%s",
                cls->name()->data(),
                name->data());
  if (constant) {
    throw_cannot_modify_static_const_prop(cls->name()->data(), name->data());
  // Enforce the declared type hint and any generic upper bounds.
  if (RuntimeOption::EvalCheckPropTypeHints > 0) {
    auto const& sprop = cls->staticProperties()[slot];
    auto const& tc = sprop.typeConstraint;
    if (tc.isCheckable()) tc.verifyStaticProperty(tv1, cls, sprop.cls, name);
    if (RuntimeOption::EvalEnforceGenericsUB > 0) {
      for (auto const& ub : sprop.ubs) {
        if (ub.isCheckable()) {
          ub.verifyStaticProperty(tv1, cls, sprop.cls, name);
  always_assert(cls->sPropLink(slot).isLocal());
  tvSet(*tv1, *val);
  tvDecRefGen(propn);
  // Move the value into the result slot (reference transferred by ndiscard).
  memcpy(output, tv1, sizeof(TypedValue));
  vmStack().ndiscard(2);
// Implements SetOpL ($loc op= rhs). Applies `op` to the local in place with
// the top-of-stack rhs, then replaces the stack cell with the new value.
OPTBLD_INLINE void iopSetOpL(tv_lval to, SetOpOp op) {
  TypedValue* fr = vmStack().topC();
  setopBody(to, op, fr);
  tvDecRefGen(fr);
  tvDup(*to, *fr);
// Implements SetOpG. Stack (top first): [rhs, name]. Applies `op` to the
// named global in place, then replaces the name cell with a copy of the
// resulting value and pops the rhs.
OPTBLD_INLINE void iopSetOpG(SetOpOp op) {
  StringData* name;
  TypedValue* fr = vmStack().topC();
  TypedValue* tv2 = vmStack().indTV(1);
  // XXX We're probably not getting warnings totally correct here
  auto const to = lookupd_gbl(vmfp(), name, tv2);
  SCOPE_EXIT { decRefStr(name); };
  assertx(to);
  setopBody(to, op, fr);
  tvDecRefGen(fr);
  tvDecRefGen(tv2);
  tvDup(*to, *tv2);
  vmStack().discard();
// Implements SetOpS (cls::$prop op= rhs). Stack (top first):
// [rhs, class, propname]. Applies `op` to the static property, verifying the
// type hint on a temporary copy first when required, and leaves a copy of
// the new value as the result.
OPTBLD_INLINE void iopSetOpS(SetOpOp op) {
  TypedValue* fr = vmStack().topC();
  TypedValue* clsCell = vmStack().indC(1);
  TypedValue* propn = vmStack().indTV(2);
  TypedValue* output = propn;
  StringData* name;
  TypedValue* val;
  bool visible, accessible, readonly, constant;
  Slot slot;

  if (!isClassType(clsCell->m_type)) {
    raise_error("Attempting static property access on non class");
  auto const cls = clsCell->m_data.pclass;

  lookup_sprop(vmfp(), cls, name, propn, val, slot, visible,
               accessible, constant, readonly, false);
  SCOPE_EXIT { decRefStr(name); };
  if (!(visible && accessible)) {
    raise_error("Invalid static property access: %s::%s",
                cls->name()->data(),
                name->data());
  if (constant) {
    throw_cannot_modify_static_const_prop(cls->name()->data(), name->data());
  auto const& sprop = cls->staticProperties()[slot];
  if (setOpNeedsTypeCheck(sprop.typeConstraint, op, val)) {
    // Compute into a temporary so the property is untouched if the
    // type-hint check throws; then move the temp into place.
    TypedValue temp;
    tvDup(*val, temp);
    SCOPE_FAIL { tvDecRefGen(&temp); };
    setopBody(&temp, op, fr);
    sprop.typeConstraint.verifyStaticProperty(
      &temp, cls, sprop.cls, name
    always_assert(cls->sPropLink(slot).isLocal());
    tvMove(temp, *val);
  } else {
    always_assert(cls->sPropLink(slot).isLocal());
    setopBody(val, op, fr);

  tvDecRefGen(propn);
  tvDecRefGen(fr);
  tvDup(*val, *output);
  vmStack().ndiscard(2);
// Implements IncDecL ($loc++ / ++$loc / -- forms). Pushes the pre- or
// post-value (per `op`) of the local; an uninit local raises an undefined-
// local notice and is treated as null first.
OPTBLD_INLINE void iopIncDecL(named_local_var fr, IncDecOp op) {
  TypedValue* to = vmStack().allocTV();
  tvWriteUninit(*to);
  if (UNLIKELY(type(fr.lval) == KindOfUninit)) {
    raise_undefined_local(vmfp(), fr.name);
    tvWriteNull(fr.lval);
  tvCopy(IncDecBody(op, fr.lval), *to);
// Implements IncDecG. Replaces the name cell at the top of the stack with
// the inc/dec result of the named global; the old name cell is released on
// scope exit (after the result has been written over it).
OPTBLD_INLINE void iopIncDecG(IncDecOp op) {
  StringData* name;
  TypedValue* nameCell = vmStack().topTV();
  auto const gbl = lookupd_gbl(vmfp(), name, nameCell);
  auto oldNameCell = *nameCell;
  SCOPE_EXIT {
    decRefStr(name);
    tvDecRefGen(oldNameCell);
  assertx(gbl);
  tvCopy(IncDecBody(op, gbl), *nameCell);
// Implements IncDecS (inc/dec of a static property). SpropState decodes the
// [class, propname] stack inputs. When the property has a checkable type
// hint, the update is computed on a temporary and verified before being
// moved into the property.
OPTBLD_INLINE void iopIncDecS(IncDecOp op) {
  SpropState ss(vmStack(), false);
  if (!(ss.visible && ss.accessible)) {
    raise_error("Invalid static property access: %s::%s",
                ss.cls->name()->data(),
                ss.name->data());
  if (ss.constant) {
    throw_cannot_modify_static_const_prop(ss.cls->name()->data(),
                                          ss.name->data());
  // Non-null iff prop type hints are enforced and this prop's is checkable.
  auto const checkable_sprop = [&]() -> const Class::SProp* {
    if (RuntimeOption::EvalCheckPropTypeHints <= 0) return nullptr;
    auto const& sprop = ss.cls->staticProperties()[ss.slot];
    return sprop.typeConstraint.isCheckable() ? &sprop : nullptr;
  }();

  auto const val = ss.val;
  if (checkable_sprop) {
    TypedValue temp;
    tvDup(*val, temp);
    SCOPE_FAIL { tvDecRefGen(&temp); };
    auto result = IncDecBody(op, &temp);
    SCOPE_FAIL { tvDecRefGen(&result); };
    checkable_sprop->typeConstraint.verifyStaticProperty(
      &temp,
      ss.cls,
      checkable_sprop->cls,
      ss.name
    always_assert(ss.cls->sPropLink(ss.slot).isLocal());
    tvMove(temp, *val);
    tvCopy(result, *ss.output);
  } else {
    always_assert(ss.cls->sPropLink(ss.slot).isLocal());
    tvCopy(IncDecBody(op, val), *ss.output);
// Implements UnsetL: release and unset the given local.
OPTBLD_INLINE void iopUnsetL(tv_lval loc) {
  tvUnset(loc);
// Implements UnsetG: pop a name from the stack and unset that global in the
// global NV table.
OPTBLD_INLINE void iopUnsetG() {
  TypedValue* tv1 = vmStack().topTV();
  StringData* name = lookup_name(tv1);
  SCOPE_EXIT { decRefStr(name); };
  auto env = g_context->m_globalNVTable;
  assertx(env != nullptr);
  env->unset(name);
  vmStack().popC();
// Interpreter-side function-entry step (body elided in this excerpt —
// TODO confirm against full source); invoked from fcallImpl() on the
// non-JIT resume path before returning control to the interpreter loop.
void funcEntry() {
// Set up and enter a call to `func` with the args already on the stack.
// `numArgsInclUnpack` counts the unpack array (if present) as one argument.
// Returns false iff the callee was intercepted and should be skipped. On
// exception, the pre-live or live callee frame is unwound here so the
// unwinder (possibly the JIT's) sees a consistent VM state.
bool doFCall(CallFlags callFlags, const Func* func, uint32_t numArgsInclUnpack,
             void* ctx, TCA retAddr) {
  TRACE(3, "FCall: pc %p func %p\n", vmpc(), vmfp()->func()->entry());

  assertx(numArgsInclUnpack <= func->numNonVariadicParams() + 1);
  assertx(kNumActRecCells == 2);
  // The ActRec sits just above the args (and the generics vec, if any).
  ActRec* ar = vmStack().indA(
    numArgsInclUnpack + (callFlags.hasGenerics() ? 1 : 0));

  // Callee checks and input initialization.
  calleeGenericsChecks(func, callFlags.hasGenerics());
  calleeArgumentArityChecks(func, numArgsInclUnpack);
  calleeDynamicCallChecks(func, callFlags.isDynamicCall());
  calleeCoeffectChecks(func, callFlags.coeffects(), numArgsInclUnpack, ctx);
  func->recordCall();
  initFuncInputs(func, numArgsInclUnpack);

  // Populate the ActRec: saved frame pointer, callee, JIT return address,
  // call offset/flags, and the this/class context.
  ar->m_sfp = vmfp();
  ar->setFunc(func);
  ar->setJitReturn(retAddr);
  ar->m_callOffAndFlags = ActRec::encodeCallOffsetAndFlags(
    callFlags.callOffset(),
    callFlags.asyncEagerReturn() ? (1 << ActRec::AsyncEagerRet) : 0
  ar->setThisOrClassAllowNull(ctx);

  try {
    prepareFuncEntry(ar, numArgsInclUnpack);
    return EventHook::FunctionCall(
      EventHook::NormalFunc,
      EventHook::Source::Interpreter
  } catch (...) {
    // Manually unwind the pre-live or live frame, as we may be called from JIT
    // and expected to enter JIT unwinder with vmfp() set to the callee.
    assertx(vmfp() == ar || vmfp() == ar->m_sfp);

    auto const func = ar->func();
    auto const numInOutParams = func->numInOutParamsForArgs(numArgsInclUnpack);

    if (ar->m_sfp == vmfp()) {
      // Unwind pre-live frame.
      assertx(vmStack().top() <= (void*)ar);
      while (vmStack().top() != (void*)ar) {
        vmStack().popTV();
      vmStack().popAR();
    } else {
      // Unwind live frame.
      vmfp() = ar->m_sfp;
      vmpc() = vmfp()->func()->entry() + ar->callOffset();
      assertx(vmStack().top() + func->numSlotsInFrame() <= (void*)ar);
      while (vmStack().top() + func->numSlotsInFrame() != (void*)ar) {
        vmStack().popTV();
      frame_free_locals_inl_no_hook(ar, func->numLocals());
      vmStack().ndiscard(func->numSlotsInFrame());
      vmStack().discardAR();
    vmStack().ndiscard(numInOutParams);
    throw;
3483 namespace {
// Tag type for calls that carry no $this/class context.
enum class NoCtx {};

// Convert the various context representations into the raw pointer stored
// in the ActRec's this/class slot. A Class* is stored as-is; an Object
// rvalue transfers its reference into the frame (detach); NoCtx stores
// nullptr (a trash marker in debug builds to catch accidental use).
// Lvalue Objects are rejected: the caller must hand over ownership.
void* takeCtx(Class* cls) { return cls; }
void* takeCtx(Object& obj) = delete;
void* takeCtx(Object&& obj) { return obj.detach(); }
void* takeCtx(NoCtx) {
  if (debug) return reinterpret_cast<void*>(ActRec::kTrashedThisSlot);
  return nullptr;
// Common tail of the FCall* opcodes: perform caller-side inout/readonly/
// dynamic-call checks, normalize the argument count (arg unpacking, or
// collecting excess args for a variadic param), build the CallFlags, and
// enter the callee via doFCall(). Returns a TCA to resume at in the JIT
// (when retToJit and the callee was not intercepted), or nullptr to
// continue in the interpreter with `pc` updated.
template<bool dynamic, typename Ctx>
TCA fcallImpl(bool retToJit, PC origpc, PC& pc, const FCallArgs& fca,
              const Func* func, Ctx&& ctx, bool logAsDynamicCall = true,
              bool isCtor = false) {
  if (fca.enforceInOut()) checkInOutMismatch(func, fca.numArgs, fca.inoutArgs);
  if (fca.enforceReadonly()) {
    checkReadonlyMismatch(func, fca.numArgs, fca.readonlyArgs);
  if (fca.enforceMutableReturn() && (func->attrs() & AttrReadonlyReturn)) {
    throwReadonlyMismatch(func, kReadonlyReturnId);
  if (fca.enforceReadonlyThis() && !(func->attrs() & AttrReadonlyThis)) {
    throwReadonlyMismatch(func, kReadonlyThisId);
  if (dynamic && logAsDynamicCall) callerDynamicCallChecks(func);
  checkStack(vmStack(), func, 0);

  // Normalize args: spread the unpack array, or pack args beyond the last
  // non-variadic param into a vec for the variadic param.
  auto const numArgsInclUnpack = [&] {
    if (UNLIKELY(fca.hasUnpack())) {
      checkStack(vmStack(), func, 0);

      GenericsSaver gs{fca.hasGenerics()};
      return prepareUnpackArgs(func, fca.numArgs, true);

    if (UNLIKELY(fca.numArgs > func->numNonVariadicParams())) {
      GenericsSaver gs{fca.hasGenerics()};
      iopNewVec(fca.numArgs - func->numNonVariadicParams());
      return func->numNonVariadicParams() + 1;

    return fca.numArgs;
  }();

  auto const callFlags = CallFlags(
    fca.hasGenerics(),
    dynamic,
    fca.asyncEagerOffset != kInvalidOffset && func->supportsAsyncEagerReturn(),
    Offset(origpc - vmfp()->func()->entry()),
    0, // generics bitmap not used by interpreter
    vmfp()->providedCoeffectsForCall(isCtor)

  auto const notIntercepted = doFCall(
    callFlags, func, numArgsInclUnpack, takeCtx(std::forward<Ctx>(ctx)),
    jit::tc::ustubs().retHelper);

  if (UNLIKELY(!notIntercepted)) {
    // The callee was intercepted and should be skipped.
    pc = vmpc();
    return nullptr;

  if (retToJit) {
    // Let JIT handle FuncEntry if possible.
    pc = vmpc();
    return jit::tc::ustubs().resumeHelperFuncEntry;

  funcEntry();
  pc = vmpc();
  return nullptr;
// Method looked up when an object (closure/functor) is called via FCallFunc.
const StaticString s___invoke("__invoke");
3561 // This covers both closures and functors.
// This covers both closures and functors.
// FCallFunc on an object: pop it and invoke its __invoke method, passing
// the object as context (or just its class when __invoke is static).
OPTBLD_INLINE TCA fcallFuncObj(bool retToJit, PC origpc, PC& pc,
                               const FCallArgs& fca) {
  assertx(tvIsObject(vmStack().topC()));
  auto obj = Object::attach(vmStack().topC()->m_data.pobj);
  vmStack().discard();

  auto const cls = obj->getVMClass();
  auto const func = cls->lookupMethod(s___invoke.get());

  if (func == nullptr) {
    raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);

  if (func->isStaticInPrologue()) {
    // Drop the instance; the call only needs the class context.
    obj.reset();
    return fcallImpl<false>(retToJit, origpc, pc, fca, func, cls);
  } else {
    return fcallImpl<false>(retToJit, origpc, pc, fca, func, std::move(obj));
3584 * Supports callables:
3585 * array($instance, 'method')
3586 * array('Class', 'method'),
3587 * vec[$instance, 'method'],
3588 * vec['Class', 'method'],
3589 * dict[0 => $instance, 1 => 'method'],
3590 * dict[0 => 'Class', 1 => 'method'],
3591 * array(Class*, Func*),
3592 * array(ObjectData*, Func*),
// FCallFunc on an array-like callable (forms listed in the comment above):
// pop it, decode it into a Func plus optional $this/class context, release
// the array, and dispatch as a dynamic call.
OPTBLD_INLINE TCA fcallFuncArr(bool retToJit, PC origpc, PC& pc,
                               const FCallArgs& fca) {
  assertx(tvIsArrayLike(vmStack().topC()));
  auto arr = Array::attach(vmStack().topC()->m_data.parr);
  vmStack().discard();

  ObjectData* thiz = nullptr;
  HPHP::Class* cls = nullptr;
  bool dynamic = false;

  auto const func = vm_decode_function(const_variant_ref{arr}, vmfp(), thiz,
                                       cls, dynamic, DecodeFlags::NoWarn);
  assertx(dynamic);
  if (UNLIKELY(func == nullptr)) {
    raise_error("Invalid callable (array)");

  // Keep $this alive (if any) across releasing the callable array.
  Object thisRC(thiz);
  arr.reset();

  if (thisRC) {
    return fcallImpl<true>(retToJit, origpc, pc, fca, func, std::move(thisRC));
  } else if (cls) {
    return fcallImpl<true>(retToJit, origpc, pc, fca, func, cls);
  } else {
    return fcallImpl<true>(retToJit, origpc, pc, fca, func, NoCtx{});
3624 * Supports callables:
3625 * 'func_name'
3626 * 'class::method'
// FCallFunc on a string callable ('func_name' or 'class::method'): pop it,
// decode it into a Func plus optional context, release the string, and
// dispatch as a dynamic call.
OPTBLD_INLINE TCA fcallFuncStr(bool retToJit, PC origpc, PC& pc,
                               const FCallArgs& fca) {
  assertx(tvIsString(vmStack().topC()));
  auto str = String::attach(vmStack().topC()->m_data.pstr);
  vmStack().discard();

  ObjectData* thiz = nullptr;
  HPHP::Class* cls = nullptr;
  bool dynamic = false;

  auto const func = vm_decode_function(const_variant_ref{str}, vmfp(), thiz,
                                       cls, dynamic, DecodeFlags::NoWarn);
  assertx(dynamic);
  if (UNLIKELY(func == nullptr)) {
    raise_call_to_undefined(str.get());

  // Keep $this alive (if any) across releasing the callable string.
  Object thisRC(thiz);
  str.reset();

  if (thisRC) {
    return fcallImpl<true>(retToJit, origpc, pc, fca, func, std::move(thisRC));
  } else if (cls) {
    return fcallImpl<true>(retToJit, origpc, pc, fca, func, cls);
  } else {
    return fcallImpl<true>(retToJit, origpc, pc, fca, func, NoCtx{});
// FCallFunc on a func pointer: pop it and call it with no context. A func
// pointer to a method is ill-formed here (methods need a class/instance).
OPTBLD_INLINE TCA fcallFuncFunc(bool retToJit, PC origpc, PC& pc,
                                const FCallArgs& fca) {
  assertx(tvIsFunc(vmStack().topC()));
  auto func = vmStack().topC()->m_data.pfunc;
  vmStack().discard();

  if (func->cls()) {
    raise_error(Strings::CALL_ILLFORMED_FUNC);

  return fcallImpl<false>(retToJit, origpc, pc, fca, func, NoCtx{});
// FCallFunc on a reified func pointer: pop it, push its reified-generics
// array back onto the stack, and call with the generics flag forced on.
OPTBLD_INLINE TCA fcallFuncRFunc(bool retToJit, PC origpc, PC& pc,
                                 FCallArgs& fca) {
  assertx(tvIsRFunc(vmStack().topC()));
  auto const rfunc = vmStack().topC()->m_data.prfunc;
  auto const func = rfunc->m_func;
  vmStack().discard();
  vmStack().pushArrayLike(rfunc->m_arr);
  decRefRFunc(rfunc);

  return
    fcallImpl<false>(retToJit, origpc, pc, fca.withGenerics(), func, NoCtx{});
// FCallFunc on a ClsMeth value: pop it and call its method with its class
// as the context.
OPTBLD_INLINE TCA fcallFuncClsMeth(bool retToJit, PC origpc, PC& pc,
                                   const FCallArgs& fca) {
  assertx(tvIsClsMeth(vmStack().topC()));
  auto const clsMeth = vmStack().topC()->m_data.pclsmeth;
  vmStack().discard();

  const Func* func = clsMeth->getFunc();
  auto const cls = clsMeth->getCls();
  assertx(func && cls);

  return fcallImpl<false>(retToJit, origpc, pc, fca, func, cls);
// FCallFunc on a reified ClsMeth: pop it, push its reified-generics array
// back onto the stack, and call with the generics flag forced on.
OPTBLD_INLINE TCA fcallFuncRClsMeth(bool retToJit, PC origpc, PC& pc,
                                    const FCallArgs& fca) {
  assertx(tvIsRClsMeth(vmStack().topC()));
  auto const rclsMeth = vmStack().topC()->m_data.prclsmeth;
  auto const cls = rclsMeth->m_cls;
  auto const func = rclsMeth->m_func;
  vmStack().discard();
  vmStack().pushArrayLike(rclsMeth->m_arr);
  decRefRClsMeth(rclsMeth);

  return fcallImpl<false>(retToJit, origpc, pc, fca.withGenerics(), func, cls);
3709 Func* resolveFuncImpl(Id id) {
3710 auto unit = vmfp()->func()->unit();
3711 auto const nep = unit->lookupNamedEntityPairId(id);
3712 auto func = Func::load(nep.second, nep.first);
3713 if (func == nullptr) raise_resolve_undefined(unit->lookupLitstrId(id));
3714 return func;
// Implements ResolveFunc: push a func pointer for the named function.
OPTBLD_INLINE void iopResolveFunc(Id id) {
  auto func = resolveFuncImpl(id);
  vmStack().pushFunc(func);
// Implements ResolveMethCaller: push a func pointer for a meth_caller after
// verifying the current context may use it.
OPTBLD_INLINE void iopResolveMethCaller(Id id) {
  auto unit = vmfp()->func()->unit();
  auto const nep = unit->lookupNamedEntityPairId(id);
  auto func = Func::load(nep.second, nep.first);
  assertx(func && func->isMethCaller());
  checkMethCaller(func, arGetContextClass(vmfp()));
  vmStack().pushFunc(func);
// Allocate an RFuncData pairing `func` with its reified generics.
RFuncData* newRFuncImpl(Func* func, ArrayData* reified_generics) {
  auto rfunc = RFuncData::newInstance(func, reified_generics);
  TRACE(2, "ResolveRFunc: just created new rfunc %s: %p\n",
        func->name()->data(), rfunc);
  return rfunc;
3738 } // namespace
// Implements ResolveRFunc: pop a vec of reified generics and push either a
// plain func pointer (when the function has no reified generics) or an
// RFunc bundling the function with the generics.
OPTBLD_INLINE void iopResolveRFunc(Id id) {
  auto const tsList = vmStack().topC();

  // Should I refactor this out with iopNewObj*?
  auto const reified = [&] () -> ArrayData* {
    if (!tvIsVec(tsList)) {
      raise_error("Attempting ResolveRFunc with invalid reified generics");
    return tsList->m_data.parr;
  }();

  auto func = resolveFuncImpl(id);
  if (!func->hasReifiedGenerics()) {
    vmStack().popC();
    vmStack().pushFunc(func);
  } else {
    checkFunReifiedGenericMismatch(func, reified);
    auto rfunc = newRFuncImpl(func, reified);
    // discard (not popC): the generics' reference moves into the rfunc.
    vmStack().discard();
    vmStack().pushRFuncNoRc(rfunc);
// Implements FCallFunc: dispatch on the type of the callable at the top of
// the stack to the appropriate helper; anything else is a fatal.
OPTBLD_INLINE TCA iopFCallFunc(bool retToJit, PC origpc, PC& pc,
                               FCallArgs fca) {
  auto const type = vmStack().topC()->m_type;
  if (isObjectType(type)) return fcallFuncObj(retToJit, origpc, pc, fca);
  if (isArrayLikeType(type)) return fcallFuncArr(retToJit, origpc, pc, fca);
  if (isStringType(type)) return fcallFuncStr(retToJit, origpc, pc, fca);
  if (isFuncType(type)) return fcallFuncFunc(retToJit, origpc, pc, fca);
  if (isRFuncType(type)) return fcallFuncRFunc(retToJit, origpc, pc, fca);
  if (isClsMethType(type)) return fcallFuncClsMeth(retToJit, origpc, pc, fca);
  if (isRClsMethType(type)) return fcallFuncRClsMeth(retToJit, origpc, pc, fca);

  raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
// Implements FCallFuncD: call the statically-named function `id`; fatals if
// it cannot be loaded.
OPTBLD_INLINE TCA iopFCallFuncD(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
                                Id id) {
  auto const nep = vmfp()->unit()->lookupNamedEntityPairId(id);
  auto const func = Func::load(nep.second, nep.first);
  if (UNLIKELY(func == nullptr)) {
    raise_call_to_undefined(vmfp()->unit()->lookupLitstrId(id));

  return fcallImpl<false>(retToJit, origpc, pc, fca, func, NoCtx{});
3788 namespace {
// Sentinel context name used by dynamic_meth_caller_force() to bypass the
// normal calling-context; rejected in repo-authoritative mode below.
const StaticString
  s_DynamicContextOverrideUnsafe("__SystemLib\\DynamicContextOverrideUnsafe");
3793 template<bool dynamic>
3794 TCA fcallObjMethodImpl(bool retToJit, PC origpc, PC& pc, const FCallArgs& fca,
3795 StringData* methName) {
3796 const Func* func;
3797 LookupResult res;
3798 assertx(tvIsObject(vmStack().indC(fca.numInputs() + (kNumActRecCells - 1))));
3799 auto const obj =
3800 vmStack().indC(fca.numInputs() + (kNumActRecCells - 1))->m_data.pobj;
3801 auto cls = obj->getVMClass();
3802 auto const ctx = [&] {
3803 if (!fca.context) return arGetContextClass(vmfp());
3804 if (fca.context->isame(s_DynamicContextOverrideUnsafe.get())) {
3805 if (RO::RepoAuthoritative) {
3806 raise_error("Cannot use dynamic_meth_caller_force() in repo-mode");
3808 return cls;
3810 return Class::load(fca.context);
3811 }();
3812 // if lookup throws, obj will be decref'd via stack
3813 res = lookupObjMethod(func, cls, methName, ctx,
3814 MethodLookupErrorOptions::RaiseOnNotFound);
3815 assertx(func);
3816 decRefStr(methName);
3817 if (res == LookupResult::MethodFoundNoThis) {
3818 throw_has_this_need_static(func);
3820 assertx(res == LookupResult::MethodFoundWithThis);
3822 if (func->hasReifiedGenerics() && !fca.hasGenerics() &&
3823 !func->getReifiedGenericsInfo().allGenericsSoft()) {
3824 throw_call_reified_func_without_generics(func);
3827 // fcallImpl() will do further checks before spilling the ActRec. If any
3828 // of these checks fail, make sure it gets decref'd only via ctx.
3829 tvWriteNull(*vmStack().indC(fca.numInputs() + (kNumActRecCells - 1)));
3830 return
3831 fcallImpl<dynamic>(retToJit, origpc, pc, fca, func, Object::attach(obj));
// Fatal: attempted to resolve an instance method on a non-object value.
static void raise_resolve_non_object(const char* methodName,
                                     const char* typeName = nullptr) {
  auto const msg = folly::sformat(
    "Cannot resolve a member function {}() on a non-object ({})",
    methodName, typeName

  raise_fatal_error(msg.c_str());
// Report a method call on a non-object: throws BadMethodCallException when
// RuntimeOption::ThrowExceptionOnBadMethodCall is set, else a fatal error.
static void throw_call_non_object(const char* methodName,
                                  const char* typeName = nullptr) {
  std::string msg;
  folly::format(&msg, "Call to a member function {}() on a non-object ({})",
                methodName, typeName);

  if (RuntimeOption::ThrowExceptionOnBadMethodCall) {
    SystemLib::throwBadMethodCallExceptionObject(String(msg));
  raise_fatal_error(msg.c_str());
3856 ALWAYS_INLINE bool
3857 fcallObjMethodHandleInput(const FCallArgs& fca, ObjMethodOp op,
3858 const StringData* methName, bool extraStk) {
3859 TypedValue* obj = vmStack().indC(fca.numInputs()
3860 + (kNumActRecCells - 1)
3861 + (extraStk ? 1 : 0));
3862 if (LIKELY(isObjectType(obj->m_type))) return false;
3864 if (UNLIKELY(op == ObjMethodOp::NullThrows || !isNullType(obj->m_type))) {
3865 auto const dataTypeStr = getDataTypeString(obj->m_type).get();
3866 throw_call_non_object(methName->data(), dataTypeStr->data());
3869 // null?->method(...), pop extra stack input, all arguments and two uninits,
3870 // the null "object" and all uninits for inout returns, then push null.
3871 auto& stack = vmStack();
3872 if (extraStk) stack.popC();
3873 if (fca.hasGenerics()) stack.popC();
3874 if (fca.hasUnpack()) stack.popC();
3876 // Save any inout arguments, as those will be pushed unchanged as
3877 // the output.
3878 std::vector<TypedValue> inOuts;
3879 for (uint32_t i = 0; i < fca.numArgs; ++i) {
3880 if (fca.enforceInOut() && fca.isInOut(fca.numArgs - i - 1)) {
3881 inOuts.emplace_back(*stack.top());
3882 stack.discard();
3883 } else {
3884 stack.popTV();
3887 stack.popU();
3888 stack.popC();
3889 for (uint32_t i = 0; i < fca.numRets - 1; ++i) stack.popU();
3891 assertx(inOuts.size() == fca.numRets - 1);
3892 for (auto const tv : inOuts) *stack.allocC() = tv;
3893 stack.pushNull();
3895 // Handled.
3896 return true;
3899 } // namespace
// Implements FCallObjMethod: the method name is the cell at the top of the
// stack; pop it and dispatch as a dynamic method call on the object below.
OPTBLD_INLINE TCA
iopFCallObjMethod(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
                  const StringData*, ObjMethodOp op) {
  TypedValue* c1 = vmStack().topC(); // Method name.
  if (!isStringType(c1->m_type)) {
    raise_error(Strings::METHOD_NAME_MUST_BE_STRING);

  StringData* methName = c1->m_data.pstr;
  if (fcallObjMethodHandleInput(fca, op, methName, true)) return nullptr;

  // We handle decReffing method name in fcallObjMethodImpl
  vmStack().discard();
  return fcallObjMethodImpl<true>(retToJit, origpc, pc, fca, methName);
// Implements FCallObjMethodD: like FCallObjMethod but with a statically-
// known (non-dynamic) method name.
OPTBLD_INLINE TCA
iopFCallObjMethodD(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
                   const StringData*, ObjMethodOp op,
                   const StringData* methName) {
  if (fcallObjMethodHandleInput(fca, op, methName, false)) return nullptr;
  auto const methNameC = const_cast<StringData*>(methName);
  return fcallObjMethodImpl<false>(retToJit, origpc, pc, fca, methNameC);
3926 namespace {
// Map a SpecialClsRef (static::/self::/parent::) to the concrete Class for
// the current frame; raises a fatal when the reference has no meaning here.
Class* specialClsRefToCls(SpecialClsRef ref) {
  switch (ref) {
    case SpecialClsRef::Static:
      if (auto const cls = frameStaticClass(vmfp())) return cls;
      raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
    case SpecialClsRef::Self:
      if (auto const cls = arGetContextClass(vmfp())) return cls;
      raise_error(HPHP::Strings::CANT_ACCESS_SELF);
    case SpecialClsRef::Parent:
      if (auto const cls = arGetContextClass(vmfp())) {
        if (auto const parent = cls->parent()) return parent;
        raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
      raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
  always_assert(false);
// Look up cls::methName for a ResolveClsMethod* opcode, fataling when the
// method is missing or unsuitable for a clsmeth (checkClsMethFuncHelper).
const Func* resolveClsMethodFunc(Class* cls, const StringData* methName) {
  const Func* func;
  auto const res = lookupClsMethod(func, cls, methName, nullptr,
                                   arGetContextClass(vmfp()),
                                   MethodLookupErrorOptions::None);
  if (res == LookupResult::MethodNotFound) {
    raise_error("Failure to resolve method name \'%s::%s\'",
                cls->name()->data(), methName->data());
  assertx(res == LookupResult::MethodFoundNoThis);
  assertx(func);
  checkClsMethFuncHelper(func);
  return func;
// Shared body of the ResolveClsMethod* opcodes: push a ClsMeth for
// cls::methName, optionally popping one extra stack input first.
template<bool extraStk = false>
void resolveClsMethodImpl(Class* cls, const StringData* methName) {
  const Func* func = resolveClsMethodFunc(cls, methName);
  auto clsmeth = ClsMethDataRef::create(cls, const_cast<Func*>(func));
  if (extraStk) vmStack().popC();
  vmStack().pushClsMethNoRc(clsmeth);
3969 } // namespace
// Implements ResolveClsMethod: the class is the cell at the top of the
// stack; replace it with a ClsMeth for that class's `methName`.
OPTBLD_INLINE void iopResolveClsMethod(const StringData* methName) {
  auto const c = vmStack().topC();
  if (!isClassType(c->m_type)) {
    raise_error("Attempting ResolveClsMethod with non-class");
  resolveClsMethodImpl<true>(c->m_data.pclass, methName);
// Implements ResolveClsMethodD: push a ClsMeth for the statically-named
// class's `methName`; fatals if the class cannot be loaded.
OPTBLD_INLINE void iopResolveClsMethodD(Id classId,
                                        const StringData* methName) {
  auto const nep = vmfp()->func()->unit()->lookupNamedEntityPairId(classId);
  auto cls = Class::load(nep.second, nep.first);
  if (UNLIKELY(cls == nullptr)) {
    raise_error("Failure to resolve class name \'%s\'", nep.first->data());
  resolveClsMethodImpl(cls, methName);
// Implements ResolveClsMethodS: like ResolveClsMethodD but the class comes
// from a special class reference (static::/self::/parent::).
OPTBLD_INLINE void iopResolveClsMethodS(SpecialClsRef ref,
                                        const StringData* methName) {
  resolveClsMethodImpl(specialClsRefToCls(ref), methName);
3994 namespace {
// Shared body of the ResolveRClsMethod* opcodes. Pops a vec of reified
// generics and pushes either an RClsMeth (method has reified generics) or a
// plain ClsMeth, optionally popping one extra stack input first.
template<bool extraStk = false>
void resolveRClsMethodImpl(Class* cls, const StringData* methName) {
  const Func* func = resolveClsMethodFunc(cls, methName);

  auto const tsList = vmStack().topC();
  auto const reified = [&] () -> ArrayData* {
    if (!tvIsVec(tsList)) {
      raise_error("Invalid reified generics when resolving class method");
    return tsList->m_data.parr;
  }();

  if (func->hasReifiedGenerics()) {
    checkFunReifiedGenericMismatch(func, reified);
    auto rclsmeth = RClsMethData::create(cls, const_cast<Func*>(func), reified);
    // discard (not popC): the generics' reference moves into the rclsmeth.
    vmStack().discard();
    if (extraStk) vmStack().popC();
    vmStack().pushRClsMethNoRc(rclsmeth);
  } else {
    auto clsmeth = ClsMethDataRef::create(cls, const_cast<Func*>(func));
    vmStack().popC();
    if (extraStk) vmStack().popC();
    vmStack().pushClsMethNoRc(clsmeth);
4022 } // namespace
// Implements ResolveRClsMethod: the class sits under the generics vec on
// the stack; resolve `methName` on it with the generics applied.
OPTBLD_INLINE void iopResolveRClsMethod(const StringData* methName) {
  auto const c = vmStack().indC(1);
  if (!isClassType(c->m_type)) {
    raise_error("Attempting ResolveRClsMethod with non-class");
  resolveRClsMethodImpl<true>(c->m_data.pclass, methName);
// Implements ResolveRClsMethodD: like ResolveRClsMethod but with a
// statically-named class; fatals if the class cannot be loaded.
OPTBLD_INLINE void iopResolveRClsMethodD(Id classId,
                                         const StringData* methName) {
  auto const nep = vmfp()->func()->unit()->lookupNamedEntityPairId(classId);
  auto cls = Class::load(nep.second, nep.first);
  if (UNLIKELY(cls == nullptr)) {
    raise_error("Failure to resolve class name \'%s\'", nep.first->data());
  resolveRClsMethodImpl<false>(cls, methName);
// Implements ResolveRClsMethodS: the class comes from a special class
// reference (static::/self::/parent::).
OPTBLD_INLINE void iopResolveRClsMethodS(SpecialClsRef ref,
                                         const StringData* methName) {
  resolveRClsMethodImpl<false>(specialClsRefToCls(ref), methName);
4047 namespace {
4049 template<bool dynamic>
4050 TCA fcallClsMethodImpl(bool retToJit, PC origpc, PC& pc, const FCallArgs& fca,
4051 Class* cls, StringData* methName, bool forwarding,
4052 bool logAsDynamicCall = true) {
4053 auto const ctx = [&] {
4054 if (!fca.context) return liveClass();
4055 if (fca.context->isame(s_DynamicContextOverrideUnsafe.get())) {
4056 if (RO::RepoAuthoritative) {
4057 raise_error("Cannot use dynamic_meth_caller_force() in repo-mode");
4059 return cls;
4061 return Class::load(fca.context);
4062 }();
4063 auto obj = liveClass() && vmfp()->hasThis() ? vmfp()->getThis() : nullptr;
4064 const Func* func;
4065 auto const res = lookupClsMethod(func, cls, methName, obj, ctx,
4066 MethodLookupErrorOptions::RaiseOnNotFound);
4067 assertx(func);
4068 decRefStr(methName);
4070 if (res == LookupResult::MethodFoundNoThis) {
4071 if (!func->isStaticInPrologue()) {
4072 throw_missing_this(func);
4074 obj = nullptr;
4075 } else {
4076 assertx(obj);
4077 assertx(res == LookupResult::MethodFoundWithThis);
4080 if (func->hasReifiedGenerics() && !fca.hasGenerics() &&
4081 !func->getReifiedGenericsInfo().allGenericsSoft()) {
4082 throw_call_reified_func_without_generics(func);
4085 if (obj) {
4086 return fcallImpl<dynamic>(
4087 retToJit, origpc, pc, fca, func, Object(obj), logAsDynamicCall);
4088 } else {
4089 if (forwarding && ctx) {
4090 /* Propagate the current late bound class if there is one, */
4091 /* otherwise use the class given by this instruction's input */
4092 if (vmfp()->hasThis()) {
4093 cls = vmfp()->getThis()->getVMClass();
4094 } else {
4095 cls = vmfp()->getClass();
4098 return fcallImpl<dynamic>(
4099 retToJit, origpc, pc, fca, func, cls, logAsDynamicCall);
4103 } // namespace
// Implements FCallClsMethod. Stack (top first): [class, method-name]. Pops
// both and dispatches as a dynamic class-method call.
OPTBLD_INLINE TCA
iopFCallClsMethod(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
                  const StringData*, IsLogAsDynamicCallOp op) {
  auto const c1 = vmStack().topC();
  if (!isClassType(c1->m_type)) {
    raise_error("Attempting to use non-class in FCallClsMethod");
  auto const cls = c1->m_data.pclass;

  auto const c2 = vmStack().indC(1); // Method name.
  if (!isStringType(c2->m_type)) {
    raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
  auto methName = c2->m_data.pstr;

  // fcallClsMethodImpl will take care of decReffing method name
  vmStack().ndiscard(2);
  assertx(cls && methName);
  auto const logAsDynamicCall = op == IsLogAsDynamicCallOp::LogAsDynamicCall ||
    RuntimeOption::EvalLogKnownMethodsAsDynamicCalls;
  return fcallClsMethodImpl<true>(
    retToJit, origpc, pc, fca, cls, methName, false, logAsDynamicCall);
// Implements FCallClsMethodD: call a statically-named class's statically-
// named method (non-dynamic, non-forwarding).
OPTBLD_INLINE TCA
iopFCallClsMethodD(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
                   const StringData*, Id classId, const StringData* methName) {
  const NamedEntityPair &nep =
    vmfp()->func()->unit()->lookupNamedEntityPairId(classId);
  Class* cls = Class::load(nep.second, nep.first);
  if (cls == nullptr) {
    raise_error(Strings::UNKNOWN_CLASS, nep.first->data());
  auto const methNameC = const_cast<StringData*>(methName);
  return fcallClsMethodImpl<false>(
    retToJit, origpc, pc, fca, cls, methNameC, false);
// Implements FCallClsMethodS: the class comes from a special class
// reference; the method name is popped from the stack. self::/parent::
// calls forward the caller's late-bound class.
OPTBLD_INLINE TCA
iopFCallClsMethodS(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
                   const StringData*, SpecialClsRef ref) {
  auto const c1 = vmStack().topC(); // Method name.
  if (!isStringType(c1->m_type)) {
    raise_error(Strings::FUNCTION_NAME_MUST_BE_STRING);
  auto const cls = specialClsRefToCls(ref);
  auto methName = c1->m_data.pstr;

  // fcallClsMethodImpl will take care of decReffing name
  vmStack().ndiscard(1);
  auto const fwd = ref == SpecialClsRef::Self || ref == SpecialClsRef::Parent;
  return fcallClsMethodImpl<true>(
    retToJit, origpc, pc, fca, cls, methName, fwd);
// Implements FCallClsMethodSD: special-class-ref base with a statically-
// named method; self::/parent:: calls forward the late-bound class.
OPTBLD_INLINE TCA
iopFCallClsMethodSD(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
                    const StringData*, SpecialClsRef ref,
                    const StringData* methName) {
  auto const cls = specialClsRefToCls(ref);
  auto const methNameC = const_cast<StringData*>(methName);
  auto const fwd = ref == SpecialClsRef::Self || ref == SpecialClsRef::Parent;
  return fcallClsMethodImpl<false>(
    retToJit, origpc, pc, fca, cls, methNameC, fwd);
4171 namespace {
4173 ObjectData* newObjImpl(Class* cls, ArrayData* reified_types) {
4174 // Replace input with uninitialized instance.
4175 auto this_ = reified_types
4176 ? ObjectData::newInstanceReified<true>(cls, reified_types)
4177 : ObjectData::newInstance<true>(cls);
4178 TRACE(2, "NewObj: just new'ed an instance of class %s: %p\n",
4179 cls->name()->data(), this_);
4180 bespoke::profileArrLikeProps(this_);
4181 return this_;
4184 void newObjDImpl(Id id, ArrayData* reified_types) {
4185 const NamedEntityPair &nep =
4186 vmfp()->func()->unit()->lookupNamedEntityPairId(id);
4187 auto cls = Class::load(nep.second, nep.first);
4188 if (cls == nullptr) {
4189 raise_error(Strings::UNKNOWN_CLASS,
4190 vmfp()->func()->unit()->lookupLitstrId(id)->data());
4192 auto this_ = newObjImpl(cls, reified_types);
4193 if (reified_types) vmStack().popC();
4194 vmStack().pushObjectNoRc(this_);
4197 } // namespace
4199 OPTBLD_INLINE void iopNewObj() {
4200 auto const clsCell = vmStack().topC();
4201 if (!isClassType(clsCell->m_type)) {
4202 raise_error("Attempting NewObj with non-class");
4204 auto const cls = clsCell->m_data.pclass;
4206 callerDynamicConstructChecks(cls);
4207 auto this_ = newObjImpl(cls, nullptr);
4208 vmStack().popC();
4209 vmStack().pushObjectNoRc(this_);
4212 OPTBLD_INLINE void iopNewObjR() {
4213 auto const reifiedCell = vmStack().topC();
4214 auto const clsCell = vmStack().indC(1);
4216 if (!isClassType(clsCell->m_type)) {
4217 raise_error("Attempting NewObjR with non-class");
4219 auto const cls = clsCell->m_data.pclass;
4221 auto const reified = [&] () -> ArrayData* {
4222 if (reifiedCell->m_type == KindOfNull) return nullptr;
4223 if (!tvIsVec(reifiedCell)) {
4224 raise_error("Attempting NewObjR with invalid reified generics");
4226 return reifiedCell->m_data.parr;
4227 }();
4229 callerDynamicConstructChecks(cls);
4230 auto this_ = newObjImpl(cls, reified);
4231 vmStack().popC();
4232 vmStack().popC();
4233 vmStack().pushObjectNoRc(this_);
4236 OPTBLD_INLINE void iopNewObjD(Id id) {
4237 newObjDImpl(id, nullptr);
4240 OPTBLD_INLINE void iopNewObjRD(Id id) {
4241 auto const tsList = vmStack().topC();
4243 auto const reified = [&] () -> ArrayData* {
4244 if (tsList->m_type == KindOfNull) return nullptr;
4245 if (!tvIsVec(tsList)) {
4246 raise_error("Attempting NewObjRD with invalid reified generics");
4248 return tsList->m_data.parr;
4249 }();
4250 newObjDImpl(id, reified);
4253 OPTBLD_INLINE void iopNewObjS(SpecialClsRef ref) {
4254 auto const cls = specialClsRefToCls(ref);
4255 if (ref == SpecialClsRef::Static && cls->hasReifiedGenerics()) {
4256 raise_error(Strings::NEW_STATIC_ON_REIFIED_CLASS, cls->name()->data());
4258 auto const reified_generics = cls->hasReifiedGenerics()
4259 ? getClsReifiedGenericsProp(cls, vmfp()) : nullptr;
4260 auto this_ = newObjImpl(cls, reified_generics);
4261 vmStack().pushObjectNoRc(this_);
// Implements FCallCtor: invokes the constructor of the object stored under
// the ActRec cells on the stack. Refcount handoff is order-critical here.
// NOTE(review): this listing dropped some short lines (blank/brace-only)
// during extraction; tokens below are kept as-is.
4264 OPTBLD_INLINE TCA iopFCallCtor(bool retToJit, PC origpc, PC& pc, FCallArgs fca,
4265 const StringData*) {
4266 assertx(fca.numRets == 1);
4267 assertx(fca.asyncEagerOffset == kInvalidOffset);
4268 assertx(tvIsObject(vmStack().indC(fca.numInputs() + (kNumActRecCells - 1))));
// The object under the inputs + ActRec cells is the instance to construct.
4269 auto const obj =
4270 vmStack().indC(fca.numInputs() + (kNumActRecCells - 1))->m_data.pobj;
// Look up the constructor; RaiseOnNotFound makes a missing ctor fatal.
4272 const Func* func;
4273 auto const ctx = arGetContextClass(vmfp());
4274 auto const res UNUSED = lookupCtorMethod(func, obj->getVMClass(), ctx,
4275 MethodLookupErrorOptions::RaiseOnNotFound);
4276 assertx(res == LookupResult::MethodFoundWithThis);
4278 // fcallImpl() will do further checks before spilling the ActRec. If any
4279 // of these checks fail, make sure it gets decref'd only via ctx.
4280 tvWriteNull(*vmStack().indC(fca.numInputs() + (kNumActRecCells - 1)));
4281 return fcallImpl<false>(
4282 retToJit, origpc, pc, fca, func, Object::attach(obj), true, true);
4285 OPTBLD_INLINE void iopLockObj() {
4286 auto c1 = vmStack().topC();
4287 if (!tvIsObject(*c1)) raise_error("LockObj: expected an object");
4288 c1->m_data.pobj->lockObject();
4291 namespace {
// Shared implementation of IterInit/LIterInit. A null `base` means the base
// is on the stack (non-local iteration) and must be consumed here.
4293 void implIterInit(PC& pc, const IterArgs& ita, TypedValue* base,
4294 PC targetpc, IterTypeOp op) {
4295 auto const local = base != nullptr;
4297 if (!local) base = vmStack().topC();
4298 auto val = frame_local(vmfp(), ita.valId);
4299 auto key = ita.hasKey() ? frame_local(vmfp(), ita.keyId) : nullptr;
4300 auto it = frame_iter(vmfp(), ita.iterId);
// Fast path for array-like bases; res == 0 means the iteration is empty
// and control jumps straight to the loop-exit target.
4302 if (isArrayLikeType(type(base))) {
4303 auto const arr = base->m_data.parr;
4304 auto const res = key
4305 ? new_iter_array_key_helper(op)(it, arr, val, key)
4306 : new_iter_array_helper(op)(it, arr, val);
4307 if (res == 0) pc = targetpc;
4308 if (!local) vmStack().discard();
4309 return;
4312 // NOTE: It looks like we could call new_iter_object at this point. However,
4313 // doing so is incorrect, since new_iter_array / new_iter_object only handle
4314 // array-like and object bases, respectively. We may have some other kind of
4315 // base which the generic Iter::init handles correctly.
4317 // As a result, the simplest code we could have here is the generic case.
4318 // It's also about as fast as it can get, because at this point, we're almost
4319 // always going to create an object iter, which can't really be optimized.
4322 if (it->init(base)) {
4323 tvAsVariant(val) = it->val();
4324 if (key) tvAsVariant(key) = it->key();
4325 } else {
4326 pc = targetpc;
// Generic path owns the base cell: popC (decref), not discard.
4328 if (!local) vmStack().popC();
// Shared implementation of IterNext/LIterNext. A non-null array-like `base`
// selects the liter_* helpers that re-read the base each step.
4331 void implIterNext(PC& pc, const IterArgs& ita, TypedValue* base, PC targetpc) {
4332 auto val = frame_local(vmfp(), ita.valId);
4333 auto key = ita.hasKey() ? frame_local(vmfp(), ita.keyId) : nullptr;
4334 auto it = frame_iter(vmfp(), ita.iterId);
4336 auto const more = [&]{
4337 if (base != nullptr && isArrayLikeType(base->m_type)) {
4338 auto const arr = base->m_data.parr;
4339 return key ? liter_next_key_ind(it, val, key, arr)
4340 : liter_next_ind(it, val, arr);
4342 return key ? iter_next_key_ind(it, val, key) : iter_next_ind(it, val);
4343 }();
// If the iterator produced another element, jump back to the loop head;
// run the surprise check since this is a potential back-edge.
4345 if (more) {
4346 vmpc() = targetpc;
4347 jmpSurpriseCheck(targetpc - pc);
4348 pc = targetpc;
4354 OPTBLD_INLINE void iopIterInit(PC& pc, const IterArgs& ita, PC targetpc) {
4355 auto const op = IterTypeOp::NonLocal;
4356 implIterInit(pc, ita, nullptr, targetpc, op);
4359 OPTBLD_INLINE void iopLIterInit(PC& pc, const IterArgs& ita,
4360 TypedValue* base, PC targetpc) {
4361 auto const op = ita.flags & IterArgs::Flags::BaseConst
4362 ? IterTypeOp::LocalBaseConst
4363 : IterTypeOp::LocalBaseMutable;
4364 implIterInit(pc, ita, base, targetpc, op);
4367 OPTBLD_INLINE void iopIterNext(PC& pc, const IterArgs& ita, PC targetpc) {
4368 implIterNext(pc, ita, nullptr, targetpc);
4371 OPTBLD_INLINE void iopLIterNext(PC& pc, const IterArgs& ita,
4372 TypedValue* base, PC targetpc) {
4373 implIterNext(pc, ita, base, targetpc);
4376 OPTBLD_INLINE void iopIterFree(Iter* it) {
4377 it->free();
4380 OPTBLD_INLINE void iopLIterFree(Iter* it, tv_lval) {
4381 it->free();
// Shared implementation of include/require and their *_once variants.
// Resolves the path per `flags` (relative to current unit, relative to doc
// root, or via the default lookup), merges the unit, and pushes a bool
// success flag. `initial` reports whether this was the first inclusion.
4384 OPTBLD_INLINE void inclOp(InclOpFlags flags, const char* opName) {
4385 TypedValue* c1 = vmStack().topC();
4386 auto path = String::attach(prepareKey(*c1));
4387 bool initial;
4388 TRACE(2, "inclOp %s %s %s %s \"%s\"\n",
4389 flags & InclOpFlags::Once ? "Once" : "",
4390 flags & InclOpFlags::DocRoot ? "DocRoot" : "",
4391 flags & InclOpFlags::Relative ? "Relative" : "",
4392 flags & InclOpFlags::Fatal ? "Fatal" : "",
4393 path.data());
// Directory of the currently-executing unit, for relative lookups.
4395 auto curUnitFilePath = [&] {
4396 namespace fs = boost::filesystem;
4397 fs::path currentUnit(vmfp()->func()->unit()->filepath()->data());
4398 fs::path currentDir(currentUnit.branch_path());
4399 return currentDir.string();
4402 auto const unit = [&] {
4403 if (flags & InclOpFlags::Relative) {
4404 String absPath = curUnitFilePath() + '/';
4405 absPath += path;
4406 return lookupUnit(absPath.get(), "", &initial,
4407 Native::s_noNativeFuncs, false);
4409 if (flags & InclOpFlags::DocRoot) {
4410 return lookupUnit(
4411 SourceRootInfo::RelativeToPhpRoot(path).get(), "", &initial,
4412 Native::s_noNativeFuncs, false);
4414 return lookupUnit(path.get(), curUnitFilePath().c_str(), &initial,
4415 Native::s_noNativeFuncs, false);
4416 }();
4418 vmStack().popC();
// Missing file: fatal for require*, warning for include*.
4419 if (unit == nullptr) {
4420 if (flags & InclOpFlags::Fatal) {
4421 raise_error("%s(%s): File not found", opName, path.data());
4422 } else {
4423 raise_warning("%s(%s): File not found", opName, path.data());
4425 vmStack().pushBool(false);
4426 return;
// *_once variants skip the merge if the unit was already included.
4429 if (!(flags & InclOpFlags::Once) || initial) {
4430 unit->merge();
4432 vmStack().pushBool(true);
4435 OPTBLD_INLINE void iopIncl() {
4436 inclOp(InclOpFlags::Default, "include");
4439 OPTBLD_INLINE void iopInclOnce() {
4440 inclOp(InclOpFlags::Once, "include_once");
4443 OPTBLD_INLINE void iopReq() {
4444 inclOp(InclOpFlags::Fatal, "require");
4447 OPTBLD_INLINE void iopReqOnce() {
4448 inclOp(InclOpFlags::Fatal | InclOpFlags::Once, "require_once");
4451 OPTBLD_INLINE void iopReqDoc() {
4452 inclOp(
4453 InclOpFlags::Fatal | InclOpFlags::Once | InclOpFlags::DocRoot,
4454 "require_once"
// Implements eval(): compiles the string on top of the stack as a unit,
// merges it, and pushes a bool success flag. Disallowed entirely in
// RepoAuthoritative builds.
4458 OPTBLD_INLINE void iopEval() {
4459 TypedValue* c1 = vmStack().topC();
4461 if (UNLIKELY(RuntimeOption::EvalAuthoritativeMode)) {
4462 // Ahead of time whole program optimizations need to assume it can
4463 // see all the code, or it really can't do much.
4464 raise_error("You can't use eval in RepoAuthoritative mode");
4467 auto code = String::attach(prepareKey(*c1));
4468 String prefixedCode = concat("<?hh ", code);
// Synthesize a pseudo-filename that encodes the call site and an md5 of
// the code, so diagnostics can identify this eval.
4470 auto evalFilename = std::string();
4471 auto vm = &*g_context;
4472 string_printf(
4473 evalFilename,
4474 "%s(%d)(%s" EVAL_FILENAME_SUFFIX,
4475 vm->getContainingFileName()->data(),
4476 vm->getLine(),
4477 string_md5(code.slice()).c_str()
4479 auto unit = compileEvalString(prefixedCode.get(), evalFilename.c_str());
4480 if (!RuntimeOption::EvalJitEvaledCode) {
4481 unit->setInterpretOnly();
4484 vmStack().popC();
// A fatal in the eval'd code is reported as a warning-level log entry
// and surfaces as a `false` return to the caller.
4485 if (auto const info = unit->getFatalInfo()) {
4486 auto const errnum = static_cast<int>(ErrorMode::WARNING);
4487 if (vm->errorNeedsLogging(errnum)) {
4488 // manual call to Logger instead of logError as we need to use
4489 // evalFileName and line as the exception doesn't track the eval()
4490 Logger::Error(
4491 "\nFatal error: %s in %s on line %d",
4492 info->m_fatalMsg.c_str(),
4493 evalFilename.c_str(),
4494 info->m_fatalLoc.line1
4498 vmStack().pushBool(false);
4499 return;
4501 unit->merge();
4502 vmStack().pushBool(true);
4505 OPTBLD_INLINE void iopThis() {
4506 checkThis(vmfp());
4507 ObjectData* this_ = vmfp()->getThis();
4508 vmStack().pushObject(this_);
4511 OPTBLD_INLINE void iopBareThis(BareThisOp bto) {
4512 if (vmfp()->func()->cls() && vmfp()->hasThis()) {
4513 ObjectData* this_ = vmfp()->getThis();
4514 vmStack().pushObject(this_);
4515 } else {
4516 vmStack().pushNull();
4517 switch (bto) {
4518 case BareThisOp::Notice: raise_notice(Strings::WARN_NULL_THIS); break;
4519 case BareThisOp::NoNotice: break;
4520 case BareThisOp::NeverNull:
4521 assertx(!"$this cannot be null in BareThis with NeverNull option");
4522 break;
4527 OPTBLD_INLINE void iopCheckThis() {
4528 checkThis(vmfp());
4531 OPTBLD_INLINE void iopChainFaults() {
4532 auto const current = *vmStack().indC(1);
4533 auto const prev = *vmStack().indC(0);
4534 if (!isObjectType(current.m_type) ||
4535 !current.m_data.pobj->instanceof(SystemLib::s_ThrowableClass) ||
4536 !isObjectType(prev.m_type) ||
4537 !prev.m_data.pobj->instanceof(SystemLib::s_ThrowableClass)) {
4538 raise_error(
4539 "Inputs to ChainFault must be objects that implement Throwable"
4543 // chainFaultObjects takes ownership of a reference to prev.
4544 vmStack().discard();
4545 chainFaultObjects(current.m_data.pobj, prev.m_data.pobj);
4548 OPTBLD_INLINE void iopLateBoundCls() {
4549 auto const cls = frameStaticClass(vmfp());
4550 if (!cls) raise_error(HPHP::Strings::CANT_ACCESS_STATIC);
4551 vmStack().pushClass(cls);
4554 OPTBLD_INLINE void iopVerifyParamType(local_var param) {
4555 const Func *func = vmfp()->func();
4556 assertx(param.index < func->numParams());
4557 assertx(func->numParams() == int(func->params().size()));
4558 const TypeConstraint& tc = func->params()[param.index].typeConstraint;
4559 if (tc.isCheckable()) {
4560 auto const ctx = tc.isThis() ? frameStaticClass(vmfp()) : nullptr;
4561 tc.verifyParam(param.lval, ctx, func, param.index);
4563 if (func->hasParamsWithMultiUBs()) {
4564 auto& ubs = const_cast<Func::ParamUBMap&>(func->paramUBs());
4565 auto it = ubs.find(param.index);
4566 if (it != ubs.end()) {
4567 for (auto& ub : it->second) {
4568 applyFlagsToUB(ub, tc);
4569 if (ub.isCheckable()) {
4570 auto const ctx = ub.isThis() ? frameStaticClass(vmfp()) : nullptr;
4571 ub.verifyParam(param.lval, ctx, func, param.index);
// Like VerifyParamType, but additionally checks the parameter against the
// reified type-structure dict sitting on top of the stack.
4578 OPTBLD_INLINE void iopVerifyParamTypeTS(local_var param) {
4579 iopVerifyParamType(param);
4580 auto const cell = vmStack().topC();
4581 assertx(tvIsDict(cell));
4582 auto isTypeVar = tcCouldBeReified(vmfp()->func(), param.index);
4583 bool warn = false;
// Only objects (or possibly-reified type vars) need the reified check.
4584 if ((isTypeVar || tvIsObject(param.lval)) &&
4585 !verifyReifiedLocalType(
4586 param.lval, cell->m_data.parr, frameStaticClass(vmfp()), vmfp()->func(),
4587 isTypeVar, warn)) {
4588 raise_reified_typehint_error(
4589 folly::sformat(
4590 "Argument {} passed to {}() must be an instance of {}, {} given",
4591 param.index + 1,
4592 vmfp()->func()->fullName()->data(),
4593 TypeStructure::toString(ArrNR(cell->m_data.parr),
4594 TypeStructure::TSDisplayType::TSDisplayTypeUser).c_str(),
4595 describe_actual_type(param.lval)
4596 ), warn
// Pop the type-structure dict.
4599 vmStack().popC();
4602 OPTBLD_INLINE void iopVerifyOutType(uint32_t paramId) {
4603 auto const func = vmfp()->func();
4604 assertx(paramId < func->numParams());
4605 assertx(func->numParams() == int(func->params().size()));
4606 auto const& tc = func->params()[paramId].typeConstraint;
4607 if (tc.isCheckable()) {
4608 auto const ctx = tc.isThis() ? frameStaticClass(vmfp()) : nullptr;
4609 tc.verifyOutParam(vmStack().topTV(), ctx, func, paramId);
4611 if (func->hasParamsWithMultiUBs()) {
4612 auto& ubs = const_cast<Func::ParamUBMap&>(func->paramUBs());
4613 auto it = ubs.find(paramId);
4614 if (it != ubs.end()) {
4615 for (auto& ub : it->second) {
4616 applyFlagsToUB(ub, tc);
4617 if (ub.isCheckable()) {
4618 auto const ctx = ub.isThis() ? frameStaticClass(vmfp()) : nullptr;
4619 ub.verifyOutParam(vmStack().topTV(), ctx, func, paramId);
4626 namespace {
4628 OPTBLD_INLINE void verifyRetTypeImpl(size_t ind) {
4629 const auto func = vmfp()->func();
4630 const auto tc = func->returnTypeConstraint();
4631 if (tc.isCheckable()) {
4632 auto const ctx = tc.isThis() ? frameStaticClass(vmfp()) : nullptr;
4633 tc.verifyReturn(vmStack().indC(ind), ctx, func);
4635 if (func->hasReturnWithMultiUBs()) {
4636 auto& ubs = const_cast<Func::UpperBoundVec&>(func->returnUBs());
4637 for (auto& ub : ubs) {
4638 applyFlagsToUB(ub, tc);
4639 if (ub.isCheckable()) {
4640 auto const ctx = ub.isThis() ? frameStaticClass(vmfp()) : nullptr;
4641 ub.verifyReturn(vmStack().indC(ind), ctx, func);
4647 } // namespace
4649 OPTBLD_INLINE void iopVerifyRetTypeC() {
4650 verifyRetTypeImpl(0); // TypedValue is on the top of the stack
// Like VerifyRetTypeC, but the top of stack holds a reified type-structure
// dict and the return value is the second cell down.
4653 OPTBLD_INLINE void iopVerifyRetTypeTS() {
4654 verifyRetTypeImpl(1); // TypedValue is the second element on the stack
4655 auto const ts = vmStack().topC();
4656 assertx(tvIsDict(ts));
4657 auto const cell = vmStack().indC(1);
4658 bool isTypeVar = tcCouldBeReified(vmfp()->func(), TypeConstraint::ReturnId);
4659 bool warn = false;
// Only objects (or possibly-reified type vars) need the reified check.
4660 if ((isTypeVar || tvIsObject(cell)) &&
4661 !verifyReifiedLocalType(
4662 cell, ts->m_data.parr, frameStaticClass(vmfp()), vmfp()->func(),
4663 isTypeVar, warn)) {
4664 raise_reified_typehint_error(
4665 folly::sformat(
4666 "Value returned from function {}() must be of type {}, {} given",
4667 vmfp()->func()->fullName()->data(),
4668 TypeStructure::toString(ArrNR(ts->m_data.parr),
4669 TypeStructure::TSDisplayType::TSDisplayTypeUser).c_str(),
4670 describe_actual_type(cell)
4671 ), warn
// Pop the type-structure dict, leaving the verified return value on top.
4674 vmStack().popC();
4677 OPTBLD_INLINE void iopVerifyRetNonNullC() {
4678 const auto func = vmfp()->func();
4679 const auto tc = func->returnTypeConstraint();
4680 auto const ctx = tc.isThis() ? frameStaticClass(vmfp()) : nullptr;
4681 tc.verifyReturnNonNull(vmStack().topC(), ctx, func);
// Invokes the native (builtin) implementation of the current function and
// performs the function-return sequence. Teardown order is critical.
4684 OPTBLD_INLINE TCA iopNativeImpl(PC& pc) {
4685 auto const fp = vmfp();
4686 auto const func = vmfp()->func();
4687 auto const sfp = fp->sfp();
4688 auto const jitReturn = jitReturnPre(fp);
4689 auto const native = func->arFuncPtr();
4690 assertx(native != nullptr);
4691 // Actually call the native implementation. This will handle freeing the
4692 // locals in the normal case. In the case of an exception, the VM unwinder
4693 // will take care of it.
4694 native(fp);
4696 // Adjust the stack; the native implementation put the return value in the
4697 // right place for us already
4698 vmStack().ndiscard(func->numSlotsInFrame());
4699 vmStack().ret();
4701 // Return control to the caller.
4702 returnToCaller(pc, sfp, jitReturn.callOff);
4703 return jitReturnPost(jitReturn);
4706 OPTBLD_INLINE void iopSelf() {
4707 auto const clss = arGetContextClass(vmfp());
4708 if (!clss) raise_error(HPHP::Strings::CANT_ACCESS_SELF);
4709 vmStack().pushClass(clss);
4712 OPTBLD_INLINE void iopParent() {
4713 auto const clss = arGetContextClass(vmfp());
4714 if (!clss) raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_CLASS);
4715 auto const parent = clss->parent();
4716 if (!parent) raise_error(HPHP::Strings::CANT_ACCESS_PARENT_WHEN_NO_PARENT);
4717 vmStack().pushClass(parent);
4720 OPTBLD_INLINE void iopCreateCl(uint32_t numArgs, uint32_t clsIx) {
4721 auto const func = vmfp()->func();
4722 auto const preCls = func->unit()->lookupPreClassId(clsIx);
4723 auto const c = Class::defClosure(preCls, true);
4725 auto const cls = c->rescope(const_cast<Class*>(func->cls()));
4726 assertx(!cls->needInitialization());
4727 auto obj = RuntimeOption::RepoAuthoritative
4728 ? createClosureRepoAuth(cls) : createClosure(cls);
4729 c_Closure::fromObject(obj)->init(numArgs, vmfp(), vmStack().top());
4730 vmStack().ndiscard(numArgs);
4731 vmStack().pushObjectNoRc(obj);
4734 static inline BaseGenerator* this_base_generator(const ActRec* fp) {
4735 auto const obj = fp->getThis();
4736 assertx(obj->getVMClass() == AsyncGenerator::getClass() ||
4737 obj->getVMClass() == Generator::getClass());
4738 return obj->getVMClass() == Generator::getClass()
4739 ? static_cast<BaseGenerator*>(Generator::fromObject(obj))
4740 : static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj));
4743 static inline Generator* this_generator(const ActRec* fp) {
4744 auto const obj = fp->getThis();
4745 return Generator::fromObject(obj);
// Interned "this" string; presumably used by nearby opcode handlers — the
// use site is outside this listing (TODO confirm).
4748 const StaticString s_this("this");
// Implements CreateCont: suspends an eagerly-executing generator function,
// materializing a {Async,}Generator object and returning it to the caller.
// NOTE(review): this listing dropped short argument/brace lines (e.g. the
// `fp,` argument of the EventHook call) during extraction.
4750 OPTBLD_INLINE TCA iopCreateCont(PC origpc, PC& pc) {
4751 auto const jitReturn = jitReturnPre(vmfp());
4753 auto const fp = vmfp();
4754 auto const func = fp->func();
4755 auto const numSlots = func->numSlotsInFrame();
4756 auto const suspendOffset = func->offsetOf(origpc);
4757 assertx(!isResumed(fp));
4758 assertx(func->isGenerator());
4760 // Create the {Async,}Generator object. Create takes care of copying local
4761 // variables and iterators.
4762 auto const obj = func->isAsync()
4763 ? AsyncGenerator::Create(fp, numSlots, nullptr, suspendOffset)
4764 : Generator::Create(fp, numSlots, nullptr, suspendOffset);
4766 auto const genData = func->isAsync() ?
4767 static_cast<BaseGenerator*>(AsyncGenerator::fromObject(obj)) :
4768 static_cast<BaseGenerator*>(Generator::fromObject(obj));
// Suspend hook runs before the frame is torn down.
4770 EventHook::FunctionSuspendCreateCont(
4772 genData->actRec(),
4773 EventHook::Source::Interpreter
4776 // Grab caller info from ActRec.
4777 ActRec* sfp = fp->sfp();
4778 Offset callOff = fp->callOffset();
4780 // Free ActRec and store the return value.
4781 vmStack().ndiscard(numSlots);
4782 vmStack().ret();
4783 tvCopy(make_tv<KindOfObject>(obj), *vmStack().topTV());
4784 assertx(vmStack().topTV() == fp->retSlot());
4786 // Return control to the caller.
4787 returnToCaller(pc, sfp, callOff);
4789 return jitReturnPost(jitReturn);
// Shared helper for ContEnter/ContRaise: re-enters a suspended generator
// frame and returns the resume address (for the JIT), leaving vmpc() at the
// resume point inside the generator.
4792 OPTBLD_INLINE TCA contEnterImpl(PC origpc) {
4793 // The stack must have one cell! Or else resumableStackBase() won't work!
4794 assertx(vmStack().top() + 1 ==
4795 (TypedValue*)vmfp() - vmfp()->func()->numSlotsInFrame());
4797 auto const gen = this_base_generator(vmfp());
4798 auto const genAR = gen->actRec();
4800 // Stack overflow check.
4801 checkStack(vmStack(), genAR->func(), 0);
4803 // Point of no return, set the generator state to Running.
4804 assertx(!gen->isRunning() && !gen->isDone());
4805 gen->setState(BaseGenerator::State::Running);
4807 // Set up previous FP and return address.
4808 auto const retHelper = genAR->func()->isAsync()
4809 ? jit::tc::ustubs().asyncGenRetHelper
4810 : jit::tc::ustubs().genRetHelper;
4811 genAR->setReturn(vmfp(), origpc, retHelper, false);
4813 // Enter the generator frame.
4814 vmfp() = genAR;
4815 vmpc() = genAR->func()->at(gen->resumable()->resumeFromYieldOffset());
4817 EventHook::FunctionResumeYield(vmfp(), EventHook::Source::Interpreter);
4819 return gen->resumable()->resumeAddr();
4822 OPTBLD_INLINE TCA iopContEnter(PC origpc, PC& pc) {
4823 auto const retAddr = contEnterImpl(origpc);
4824 pc = vmpc();
4825 return retAddr;
4828 OPTBLD_INLINE void iopContRaise(PC origpc, PC& pc) {
4829 contEnterImpl(origpc);
4830 pc = vmpc();
4831 iopThrow(pc);
// Shared implementation of Yield/YieldK: suspends the current (resumed)
// generator frame, recording the yielded key/value, and returns control to
// whoever resumed it.
4834 OPTBLD_INLINE TCA yield(PC origpc, PC& pc, const TypedValue* key, const TypedValue value) {
4835 auto const jitReturn = jitReturnPre(vmfp());
4837 auto const fp = vmfp();
4838 auto const func = fp->func();
4839 auto const suspendOffset = func->offsetOf(origpc);
4840 assertx(isResumed(fp));
4841 assertx(func->isGenerator());
4843 EventHook::FunctionSuspendYield(fp, EventHook::Source::Interpreter);
4845 auto const sfp = fp->sfp();
4846 auto const callOff = fp->callOffset();
4848 if (!func->isAsync()) {
4849 // Non-async generator.
4850 assertx(fp->sfp());
4851 frame_generator(fp)->yield(suspendOffset, key, value);
4853 // Push return value of next()/send()/raise().
4854 vmStack().pushNull();
4855 } else {
4856 // Async generator.
4857 auto const gen = frame_async_generator(fp);
4858 auto const eagerResult = gen->yield(suspendOffset, key, value);
4859 if (eagerResult) {
4860 // Eager execution => return StaticWaitHandle.
4861 assertx(sfp);
4862 vmStack().pushObjectNoRc(eagerResult);
4863 } else {
4864 // Resumed execution => return control to the scheduler.
4865 assertx(!sfp);
4869 returnToCaller(pc, sfp, callOff);
4871 return jitReturnPost(jitReturn);
4874 OPTBLD_INLINE TCA iopYield(PC origpc, PC& pc) {
4875 auto const value = *vmStack().topC();
4876 vmStack().discard();
4877 return yield(origpc, pc, nullptr, value);
4880 OPTBLD_INLINE TCA iopYieldK(PC origpc, PC& pc) {
4881 auto const key = *vmStack().indC(1);
4882 auto const value = *vmStack().topC();
4883 vmStack().ndiscard(2);
4884 return yield(origpc, pc, &key, value);
4887 OPTBLD_INLINE void iopContCheck(ContCheckOp subop) {
4888 this_base_generator(vmfp())->checkNext(subop == ContCheckOp::CheckStarted);
4891 OPTBLD_INLINE void iopContValid() {
4892 vmStack().pushBool(
4893 this_generator(vmfp())->getState() != BaseGenerator::State::Done);
4896 OPTBLD_INLINE void iopContKey() {
4897 Generator* cont = this_generator(vmfp());
4898 cont->startedCheck();
4899 tvDup(cont->m_key, *vmStack().allocC());
4902 OPTBLD_INLINE void iopContCurrent() {
4903 Generator* cont = this_generator(vmfp());
4904 cont->startedCheck();
4906 if(cont->getState() == BaseGenerator::State::Done) {
4907 vmStack().pushNull();
4908 } else {
4909 tvDup(cont->m_value, *vmStack().allocC());
4913 OPTBLD_INLINE void iopContGetReturn() {
4914 Generator* cont = this_generator(vmfp());
4915 cont->startedCheck();
4917 if(!cont->successfullyFinishedExecuting()) {
4918 SystemLib::throwExceptionObject("Cannot get return value of a generator "
4919 "that hasn't returned");
4922 tvDup(cont->m_value, *vmStack().allocC());
// Suspends an eagerly-executing async function or async generator on the
// wait handle popped from the stack. For async functions this builds an
// AsyncFunctionWaitHandle and returns it to the caller; for async
// generators it builds an AsyncGeneratorWaitHandle. Order is critical.
// NOTE(review): this listing dropped short lines (e.g. the `fp,` argument
// of FunctionSuspendAwaitEF) during extraction.
4925 OPTBLD_INLINE void asyncSuspendE(PC origpc, PC& pc) {
4926 auto const fp = vmfp();
4927 auto const func = fp->func();
4928 auto const suspendOffset = func->offsetOf(origpc);
4929 assertx(func->isAsync());
4930 assertx(resumeModeFromActRec(fp) != ResumeMode::Async);
4932 // Pop the dependency we are blocked on.
4933 auto child = wait_handle<c_WaitableWaitHandle>(*vmStack().topC());
4934 assertx(!child->isFinished());
4935 vmStack().discard();
4937 if (!func->isGenerator()) { // Async function.
4938 // Create the AsyncFunctionWaitHandle object. Create takes care of
4939 // copying local variables and itertors.
4940 auto waitHandle = c_AsyncFunctionWaitHandle::Create(
4941 fp, func->numSlotsInFrame(), nullptr, suspendOffset, child);
4943 if (RO::EvalEnableImplicitContext) {
4944 waitHandle->m_implicitContext = *ImplicitContext::activeCtx;
4946 // Call the suspend hook. It will decref the newly allocated waitHandle
4947 // if it throws.
4948 EventHook::FunctionSuspendAwaitEF(
4950 waitHandle->actRec(),
4951 EventHook::Source::Interpreter
4954 // Grab caller info from ActRec.
4955 ActRec* sfp = fp->sfp();
4956 Offset callOff = fp->callOffset();
4958 // Free ActRec and store the return value. In case async eager return was
4959 // requested by the caller, let it know that we did not finish eagerly.
4960 vmStack().ndiscard(func->numSlotsInFrame());
4961 vmStack().ret();
4962 tvCopy(make_tv<KindOfObject>(waitHandle), *vmStack().topTV());
4963 vmStack().topTV()->m_aux.u_asyncEagerReturnFlag = 0;
4964 assertx(vmStack().topTV() == fp->retSlot());
4966 // Return control to the caller.
4967 returnToCaller(pc, sfp, callOff);
4968 } else { // Async generator.
4969 // Create new AsyncGeneratorWaitHandle.
4970 auto waitHandle = c_AsyncGeneratorWaitHandle::Create(
4971 fp, nullptr, suspendOffset, child);
4973 if (RO::EvalEnableImplicitContext) {
4974 waitHandle->m_implicitContext = *ImplicitContext::activeCtx;
4977 // Call the suspend hook. It will decref the newly allocated waitHandle
4978 // if it throws.
4979 EventHook::FunctionSuspendAwaitEG(fp, EventHook::Source::Interpreter);
4981 // Store the return value.
4982 vmStack().pushObjectNoRc(waitHandle);
4984 // Return control to the caller (AG::next()).
4985 assertx(fp->sfp());
4986 returnToCaller(pc, fp->sfp(), fp->callOffset());
// Suspends a resumed (scheduler-driven) async function or async generator
// on the wait handle popped from the stack, then returns control to the
// scheduler by nulling out pc/vmfp.
// NOTE(review): this listing dropped short lines (e.g. the `fp,` argument
// of FunctionSuspendAwaitR) during extraction.
4990 OPTBLD_INLINE void asyncSuspendR(PC origpc, PC& pc) {
4991 auto const fp = vmfp();
4992 auto const func = fp->func();
4993 auto const suspendOffset = func->offsetOf(origpc);
4994 assertx(!fp->sfp());
4995 assertx(func->isAsync());
4996 assertx(resumeModeFromActRec(fp) == ResumeMode::Async);
4998 // Pop the dependency we are blocked on.
4999 auto child = req::ptr<c_WaitableWaitHandle>::attach(
5000 wait_handle<c_WaitableWaitHandle>(*vmStack().topC()));
5001 assertx(!child->isFinished());
5002 vmStack().discard();
5004 // Before adjusting the stack or doing anything, check the suspend hook.
5005 // This can throw.
5006 EventHook::FunctionSuspendAwaitR(
5008 child.get(),
5009 EventHook::Source::Interpreter
5012 // Await child and suspend the async function/generator. May throw.
5013 if (!func->isGenerator()) { // Async function.
5014 if (RO::EvalEnableImplicitContext) {
5015 frame_afwh(fp)->m_implicitContext = *ImplicitContext::activeCtx;
5017 frame_afwh(fp)->await(suspendOffset, std::move(child));
5018 } else { // Async generator.
5019 auto const gen = frame_async_generator(fp);
5020 gen->resumable()->setResumeAddr(nullptr, suspendOffset);
5021 if (RO::EvalEnableImplicitContext) {
5022 gen->getWaitHandle()->m_implicitContext = *ImplicitContext::activeCtx;
5024 gen->getWaitHandle()->await(std::move(child));
5027 // Return control to the scheduler.
5028 pc = nullptr;
5029 vmfp() = nullptr;
5032 namespace {
5034 TCA suspendStack(PC origpc, PC &pc) {
5035 auto const jitReturn = jitReturnPre(vmfp());
5036 if (resumeModeFromActRec(vmfp()) == ResumeMode::Async) {
5037 // suspend resumed execution
5038 asyncSuspendR(origpc, pc);
5039 } else {
5040 // suspend eager execution
5041 asyncSuspendE(origpc, pc);
5043 return jitReturnPost(jitReturn);
5048 OPTBLD_INLINE TCA iopAwait(PC origpc, PC& pc) {
5049 auto const awaitable = vmStack().topC();
5050 auto wh = c_Awaitable::fromTV(*awaitable);
5051 if (UNLIKELY(wh == nullptr)) {
5052 SystemLib::throwBadMethodCallExceptionObject("Await on a non-Awaitable");
5054 if (LIKELY(wh->isFailed())) {
5055 throw req::root<Object>{wh->getException()};
5057 if (wh->isSucceeded()) {
5058 tvSet(wh->getResult(), *vmStack().topC());
5059 return nullptr;
5061 return suspendStack(origpc, pc);
// Implements AwaitAll over a range of locals: counts unfinished awaitables
// in the range; if none, pushes null and continues, otherwise builds an
// AwaitAllWaitHandle over the range and suspends on it.
5064 OPTBLD_INLINE TCA iopAwaitAll(PC origpc, PC& pc, LocalRange locals) {
5065 uint32_t cnt = 0;
5066 for (auto i = locals.first; i < locals.first + locals.count; ++i) {
5067 auto const local = *frame_local(vmfp(), i);
// Null locals are permitted and simply skipped.
5068 if (tvIsNull(local)) continue;
5069 auto const awaitable = c_Awaitable::fromTV(local);
5070 if (UNLIKELY(awaitable == nullptr)) {
5071 SystemLib::throwBadMethodCallExceptionObject("Await on a non-Awaitable");
5073 if (!awaitable->isFinished()) {
5074 ++cnt;
// Everything already finished: no suspension needed.
5078 if (!cnt) {
5079 vmStack().pushNull();
5080 return nullptr;
5083 auto obj = Object::attach(
5084 c_AwaitAllWaitHandle::fromFrameNoCheck(vmfp(), locals.first,
5085 locals.first + locals.count, cnt)
5087 assertx(obj->isWaitHandle());
5088 assertx(!static_cast<c_Awaitable*>(obj.get())->isFinished());
5090 vmStack().pushObjectNoRc(obj.detach());
5091 return suspendStack(origpc, pc);
5094 OPTBLD_INLINE void iopWHResult() {
5095 // we should never emit this bytecode for non-waithandle
5096 auto const wh = c_Awaitable::fromTV(*vmStack().topC());
5097 if (UNLIKELY(!wh)) {
5098 raise_error("WHResult input was not a subclass of Awaitable");
5101 // the failure condition is likely since we punt to this opcode
5102 // in the JIT when the state is failed.
5103 if (wh->isFailed()) {
5104 throw_object(Object{wh->getException()});
5106 if (wh->isSucceeded()) {
5107 tvSet(wh->getResult(), *vmStack().topC());
5108 return;
5110 SystemLib::throwInvalidOperationExceptionObject(
5111 "Request for result on pending wait handle, "
5112 "must await or join() before calling result()");
5113 not_reached();
5116 OPTBLD_INLINE void iopSetImplicitContextByIndex() {
5117 if (!RO::EvalEnableImplicitContext) {
5118 vmStack().replaceC<KindOfInt64>(ImplicitContext::kEmptyIndex);
5119 return;
5121 auto const tv = vmStack().topC();
5122 if (UNLIKELY(!tvIsInt(tv))) {
5123 SystemLib::throwInvalidArgumentExceptionObject(
5124 "Invalid input to SetImplicitContextByIndex");
5126 auto const result = jit::setImplicitContextByIndex(tv->m_data.num);
5127 vmStack().replaceC<KindOfInt64>(result);
5130 OPTBLD_INLINE void iopCheckProp(const StringData* propName) {
5131 auto* cls = vmfp()->getClass();
5132 auto* propVec = cls->getPropData();
5133 always_assert(propVec);
5135 auto* ctx = arGetContextClass(vmfp());
5136 auto slot = ctx->lookupDeclProp(propName);
5137 auto index = cls->propSlotToIndex(slot);
5139 auto const val = (*propVec)[index].val;
5140 vmStack().pushBool(type(val) != KindOfUninit);
// Implements InitProp: initializes a static or instance property of the
// current class from the value on top of the stack, verifying the
// property's type hint when prop-type-hint checking is enabled.
5143 OPTBLD_INLINE void iopInitProp(const StringData* propName, InitPropOp propOp) {
5144 auto* cls = vmfp()->getClass();
5146 auto* ctx = arGetContextClass(vmfp());
5147 auto* fr = vmStack().topC();
// Locate the destination lval for the property being initialized.
5149 auto lval = [&] () -> tv_lval {
5150 switch (propOp) {
5151 case InitPropOp::Static: {
5152 auto const slot = ctx->lookupSProp(propName);
5153 assertx(slot != kInvalidSlot);
5154 auto ret = cls->getSPropData(slot);
5155 if (RuntimeOption::EvalCheckPropTypeHints > 0) {
5156 auto const& sprop = cls->staticProperties()[slot];
5157 auto const& tc = sprop.typeConstraint;
5158 if (tc.isCheckable()) {
5159 tc.verifyStaticProperty(fr, cls, sprop.cls, sprop.name);
5162 return ret;
5165 case InitPropOp::NonStatic: {
5166 auto* propVec = cls->getPropData();
5167 always_assert(propVec);
5168 auto const slot = ctx->lookupDeclProp(propName);
5169 auto const index = cls->propSlotToIndex(slot);
5170 assertx(slot != kInvalidSlot);
5171 auto ret = (*propVec)[index].val;
5172 if (RuntimeOption::EvalCheckPropTypeHints > 0) {
5173 auto const& prop = cls->declProperties()[slot];
5174 auto const& tc = prop.typeConstraint;
5175 if (tc.isCheckable()) tc.verifyProperty(fr, cls, prop.cls, prop.name);
5177 return ret;
5180 always_assert(false);
5181 }();
// Copy (with incref) the initializer into the property, then pop it.
5183 tvDup(*fr, lval);
5184 vmStack().popC();
5187 OPTBLD_INLINE void iopOODeclExists(OODeclExistsOp subop) {
5188 TypedValue* aloadTV = vmStack().topTV();
5189 if (aloadTV->m_type != KindOfBoolean) {
5190 raise_error("OODeclExists: Expected Bool on top of stack, got %s",
5191 tname(aloadTV->m_type).c_str());
5194 bool autoload = aloadTV->m_data.num;
5195 vmStack().popX();
5197 TypedValue* name = vmStack().topTV();
5198 if (!isStringType(name->m_type)) {
5199 raise_error("OODeclExists: Expected String on stack, got %s",
5200 tname(aloadTV->m_type).c_str());
5203 ClassKind kind;
5204 switch (subop) {
5205 case OODeclExistsOp::Class : kind = ClassKind::Class; break;
5206 case OODeclExistsOp::Trait : kind = ClassKind::Trait; break;
5207 case OODeclExistsOp::Interface : kind = ClassKind::Interface; break;
5209 tvAsVariant(name) = Class::exists(name->m_data.pstr, autoload, kind);
5212 OPTBLD_INLINE void iopSilence(tv_lval loc, SilenceOp subop) {
5213 switch (subop) {
5214 case SilenceOp::Start:
5215 type(loc) = KindOfInt64;
5216 val(loc).num = zero_error_level();
5217 break;
5218 case SilenceOp::End:
5219 assertx(type(loc) == KindOfInt64);
5220 restore_error_level(val(loc).num);
5221 break;
5225 std::string prettyStack(const std::string& prefix) {
5226 if (!vmfp()) return "__Halted";
5227 int offset = (vmfp()->func()->unit() != nullptr)
5228 ? pcOff() : 0;
5229 auto begPrefix = prefix + "__";
5230 auto midPrefix = prefix + "|| ";
5231 auto endPrefix = prefix + "\\/";
5232 auto stack = vmStack().toString(vmfp(), offset, midPrefix);
5233 return begPrefix + "\n" + stack + endPrefix;
5236 // callable from gdb
5237 void DumpStack() {
5238 fprintf(stderr, "%s\n", prettyStack("").c_str());
5241 // callable from gdb
5242 void DumpCurUnit(int skip) {
5243 ActRec* fp = vmfp();
5244 Offset pc = fp->func()->unit() ? pcOff() : 0;
5245 while (skip--) {
5246 fp = g_context->getPrevVMState(fp, &pc);
5248 if (fp == nullptr) {
5249 std::cout << "Don't have a valid fp\n";
5250 return;
5253 printf("Offset = %d, in function %s\n", pc, fp->func()->name()->data());
5254 Unit* u = fp->func()->unit();
5255 if (u == nullptr) {
5256 std::cout << "Current unit is NULL\n";
5257 return;
5259 printf("Dumping bytecode for %s(%p)\n", u->filepath()->data(), u);
5260 std::cout << u->toString();
// callable from gdb
// Print the TC (translation-cache) return address we were called from, plus
// the current PHP file:line, to stderr.
void PrintTCCallerInfo() {
  VMRegAnchor _;

  auto const f = vmfp()->func();
  auto const u = f->unit();
  // Walk the native frame-pointer chain until we find a saved RIP that lies
  // inside the TC; that is the JIT code that called into the runtime.
  auto const rip = []() -> jit::TCA {
    DECLARE_FRAME_POINTER(reg_fp);
    // NB: We can't directly mutate the register-mapped `reg_fp'.
    for (ActRec* fp = reg_fp; fp; fp = fp->m_sfp) {
      auto const rip = jit::TCA(fp->m_savedRip);
      if (jit::tc::isValidCodeAddress(rip)) return rip;
    }
    return nullptr;  // no TC frame found on the native stack
  }();

  fprintf(stderr, "Called from TC address %p\n", rip);
  std::cerr << u->filepath()->data() << ':'
            << f->getLineNumber(f->offsetOf(vmpc())) << '\n';
}
// thread-local cached coverage info
// Last unit/line recorded by recordCodeCoverage(); lets us skip re-recording
// the same source line for consecutive bytecodes.
static __thread Unit* s_prev_unit;
static __thread int s_prev_line;
// Record code-coverage for the line of the current bytecode, either into the
// unit's per-file coverage data or into the request-wide coverage map.
void recordCodeCoverage(PC /*pc*/) {
  auto const func = vmfp()->func();
  Unit* unit = func->unit();
  assertx(unit != nullptr);
  // Never record coverage for the systemlib hhas unit.
  if (unit == SystemLib::s_hhas_unit) {
    return;
  }

  // Per-file coverage mode: delegate to the unit and return.
  if (!RO::RepoAuthoritative && RO::EvalEnablePerFileCoverage) {
    if (unit->isCoverageEnabled()) {
      unit->recordCoverage(func->getLineNumber(pcOff()));
    }
    return;
  }

  int line = func->getLineNumber(pcOff());
  assertx(line != -1);
  // Only record when we moved to a different unit or line than last time
  // (s_prev_* are the thread-local dedup cache above).
  if (unit != s_prev_unit || line != s_prev_line) {
    s_prev_unit = unit;
    s_prev_line = line;
    const StringData* filepath = unit->filepath();
    assertx(filepath->isStatic());
    RI().m_coverage.Record(filepath->data(), line, line);
  }
}
5314 void resetCoverageCounters() {
5315 s_prev_line = -1;
5316 s_prev_unit = nullptr;
// Emit a separator line to the trace log (level 3) around an opcode's
// stack-trace output.
static inline void
condStackTraceSep(Op opcode) {
  TRACE(3, "%s "
        "========================================"
        "========================================\n",
        opcodeToName(opcode));
}

// Trace the pretty-printed VM stack, tagging each line with `pfx` (level 3).
#define COND_STACKTRACE(pfx)\
  ONTRACE(3, auto stack = prettyStack(pfx);\
          Trace::trace("%s\n", stack.c_str());)
namespace {

/*
 * iopWrapReturn() calls a function pointer and forwards its return value if it
 * returns TCA, or nullptr if returns void.
 * Some opcodes need the original PC by value, and some do not. We have wrappers
 * for both flavors. Some opcodes (FCall*) may want to return to the JIT in the
 * middle of an instruction, so we pass the breakOnCtlFlow flag. When this flag
 * is true in control flow instructions such as FCall*, we are guaranteed to
 * use the returned TCA to return to the JIT and so it is safe to return in
 * the middle of an instruction.
 */

// void-returning iop that does not take the original PC.
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(void(fn)(Params...), bool, PC, Args&&... args) {
  fn(std::forward<Args>(args)...);
  return nullptr;
}

// TCA-returning iop that does not take the original PC.
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(TCA(fn)(Params...), bool, PC, Args&&... args) {
  return fn(std::forward<Args>(args)...);
}

// void-returning iop that takes the original PC as its first argument.
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(void(fn)(PC, Params...), bool, PC origpc,
                                Args&&... args) {
  fn(origpc, std::forward<Args>(args)...);
  return nullptr;
}

// TCA-returning iop that takes the original PC as its first argument.
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(TCA(fn)(PC, Params...), bool, PC origpc,
                                Args&&... args) {
  return fn(origpc, std::forward<Args>(args)...);
}

// iop that also wants the breakOnCtlFlow flag (e.g. the FCall* family).
template<typename... Params, typename... Args>
OPTBLD_INLINE TCA iopWrapReturn(TCA(fn)(bool, PC, Params...),
                                bool breakOnCtlFlow, PC origpc,
                                Args&&... args) {
  return fn(breakOnCtlFlow, origpc, std::forward<Args>(args)...);
}

/*
 * Some bytecodes with SA immediates want the raw Id to look up a NamedEntity
 * quickly, and some want the const StringData*. Support both by decoding to
 * this struct and implicitly converting to what the callee wants.
 */
struct litstr_id {
  /* implicit */ ALWAYS_INLINE operator const StringData*() const {
    return liveUnit()->lookupLitstrId(id);
  }
  /* implicit */ ALWAYS_INLINE operator Id() const {
    return id;
  }

  Id id{kInvalidId};
};
5391 * These macros are used to generate wrapper functions for the iop*() functions
5392 * defined earlier in this file. iopWrapFoo() decodes immediates from the
5393 * bytecode stream according to the signature of Foo (in hhbc.h), then calls
5394 * iopFoo() with those decoded arguments.
5396 #define FLAG_NF
5397 #define FLAG_TF
5398 #define FLAG_CF , pc
5399 #define FLAG_CF_TF FLAG_CF
5401 #define DECODE_IVA decode_iva(pc)
5402 #define DECODE_I64A decode<int64_t>(pc)
5403 #define DECODE_LA decode_local(pc)
5404 #define DECODE_NLA decode_named_local_var(pc)
5405 #define DECODE_ILA decode_indexed_local(pc)
5406 #define DECODE_IA decode_iter(pc)
5407 #define DECODE_DA decode<double>(pc)
5408 #define DECODE_SA decode<litstr_id>(pc)
5409 #define DECODE_AA decode_litarr(pc)
5410 #define DECODE_RATA decode_rat(pc)
5411 #define DECODE_BA origpc + decode_ba(pc)
5412 #define DECODE_OA(ty) decode<ty>(pc)
5413 #define DECODE_KA decode_member_key(pc, liveUnit())
5414 #define DECODE_LAR decodeLocalRange(pc)
5415 #define DECODE_ITA decodeIterArgs(pc)
5416 #define DECODE_FCA decodeFCallArgs(op, pc, liveUnit())
5417 #define DECODE_BLA decode_imm_array<Offset>(pc)
5418 #define DECODE_SLA decode_imm_array<StrVecItem>(pc)
5419 #define DECODE_VSA decode_imm_array<Id>(pc)
5421 #define DECODE_NA
5422 #define DECODE_ONE(a) auto const imm1 = DECODE_##a;
5423 #define DECODE_TWO(a, b) DECODE_ONE(a) auto const imm2 = DECODE_##b;
5424 #define DECODE_THREE(a, b, c) DECODE_TWO(a, b) auto const imm3 = DECODE_##c;
5425 #define DECODE_FOUR(a, b, c, d) \
5426 DECODE_THREE(a, b, c) auto const imm4 = DECODE_##d;
5427 #define DECODE_FIVE(a, b, c, d, e) \
5428 DECODE_FOUR(a, b, c, d) auto const imm5 = DECODE_##e;
5429 #define DECODE_SIX(a, b, c, d, e, f) \
5430 DECODE_FIVE(a, b, c, d, e) auto const imm6 = DECODE_##f;
5432 #define PASS_NA
5433 #define PASS_ONE(...) , imm1
5434 #define PASS_TWO(...) , imm1, imm2
5435 #define PASS_THREE(...) , imm1, imm2, imm3
5436 #define PASS_FOUR(...) , imm1, imm2, imm3, imm4
5437 #define PASS_FIVE(...) , imm1, imm2, imm3, imm4, imm5
5438 #define PASS_SIX(...) , imm1, imm2, imm3, imm4, imm5, imm6
5440 #ifdef HHVM_TAINT
5441 #define TAINT(name, imm, in, out, flags) \
5442 iopWrapReturn( \
5443 taint::iop##name, breakOnCtlFlow, origpc FLAG_##flags PASS_##imm);
5444 #else
5445 #define TAINT(name, imm, in, out, flags) ;
5446 #endif
5448 #define O(name, imm, in, out, flags) \
5449 template<bool breakOnCtlFlow> \
5450 OPTBLD_INLINE TCA iopWrap##name(PC& pc) { \
5451 UNUSED auto constexpr op = Op::name; \
5452 UNUSED auto const origpc = pc - encoded_op_size(op); \
5453 DECODE_##imm \
5454 TAINT(name, imm, in, out, flags); \
5455 return iopWrapReturn( \
5456 iop##name, breakOnCtlFlow, origpc FLAG_##flags PASS_##imm); \
5458 OPCODES
// Tear down the decode/pass macro machinery so none of it leaks into the
// rest of the file. Fix: DECODE_ITA, DECODE_SIX and PASS_SIX were defined
// above but never undefined here; add them (undef'ing an already-undefined
// name is harmless).
#undef FLAG_NF
#undef FLAG_TF
#undef FLAG_CF
#undef FLAG_CF_TF

#undef DECODE_IVA
#undef DECODE_I64A
#undef DECODE_LA
#undef DECODE_NLA
#undef DECODE_ILA
#undef DECODE_IA
#undef DECODE_DA
#undef DECODE_SA
#undef DECODE_AA
#undef DECODE_RATA
#undef DECODE_BA
#undef DECODE_OA
#undef DECODE_KA
#undef DECODE_LAR
#undef DECODE_ITA
#undef DECODE_FCA
#undef DECODE_BLA
#undef DECODE_SLA
#undef DECODE_VSA

#undef DECODE_NA
#undef DECODE_ONE
#undef DECODE_TWO
#undef DECODE_THREE
#undef DECODE_FOUR
#undef DECODE_FIVE
#undef DECODE_SIX

#undef PASS_NA
#undef PASS_ONE
#undef PASS_TWO
#undef PASS_THREE
#undef PASS_FOUR
#undef PASS_FIVE
#undef PASS_SIX

#undef O
/*
 * The interpOne functions are fat wrappers around the iop* functions, mostly
 * adding a bunch of debug-only logging and stats tracking.
 */
// Each interpOne<Name> syncs VM regs from the JIT (fp/sp/pcOff), executes
// exactly one bytecode via iopWrap<Name><true>, and returns the TCA (if any)
// to resume at in the TC.
#define O(opcode, imm, push, pop, flags)                                \
  TCA interpOne##opcode(ActRec* fp, TypedValue* sp, Offset pcOff) {     \
    interp_set_regs(fp, sp, pcOff);                                     \
    SKTRACE(5, liveSK(),                                                \
            "%40s %p %p\n",                                             \
            "interpOne" #opcode " before (fp,sp)", vmfp(), vmsp());     \
    if (Stats::enableInstrCount()) {                                    \
      Stats::inc(Stats::Instr_Transl##opcode, -1);                      \
      Stats::inc(Stats::Instr_InterpOne##opcode);                       \
    }                                                                   \
    if (Trace::moduleEnabled(Trace::interpOne, 1)) {                    \
      static const StringData* cat = makeStaticString("interpOne");     \
      static const StringData* name = makeStaticString(#opcode);        \
      Stats::incStatGrouped(cat, name, 1);                              \
    }                                                                   \
    if (Trace::moduleEnabled(Trace::ringbuffer)) {                      \
      auto sk = liveSK().toAtomicInt();                                 \
      Trace::ringbufferEntry(Trace::RBTypeInterpOne, sk, 0);            \
    }                                                                   \
    INC_TPC(interp_one)                                                 \
    /* Correct for over-counting in TC-stats. */                        \
    Stats::inc(Stats::Instr_TC, -1);                                    \
    condStackTraceSep(Op##opcode);                                      \
    COND_STACKTRACE("op"#opcode" pre: ");                               \
    PC pc = vmpc();                                                     \
    ONTRACE(1, auto offset = vmfp()->func()->offsetOf(pc);              \
            Trace::trace("op"#opcode" offset: %d\n", offset));          \
    assertx(peek_op(pc) == Op::opcode);                                 \
    pc += encoded_op_size(Op::opcode);                                  \
    auto const retAddr = iopWrap##opcode<true>(pc);                     \
    vmpc() = pc;                                                        \
    COND_STACKTRACE("op"#opcode" post: ");                              \
    condStackTraceSep(Op##opcode);                                      \
    /*
     * Only set regstate back to dirty if an exception is not
     * propagating. If an exception is throwing, regstate for this call
     * is actually still correct, and we don't have information in the
     * fixup map for interpOne calls anyway.
     */ \
    regState() = VMRegState::DIRTY;                                     \
    return retAddr;                                                     \
  }
OPCODES
#undef O

// Table of interpOne* handlers indexed by opcode; the JIT uses this to
// interpret a single instruction from translated code.
InterpOneFunc interpOneEntryPoints[] = {
#define O(opcode, imm, push, pop, flags) &interpOne##opcode,
  OPCODES
#undef O
};
5557 // fast path to look up native pc; try entry point first.
5558 PcPair lookup_cti(const Func* func, PC pc) {
5559 auto unitpc = func->entry();
5560 auto cti_entry = func->ctiEntry();
5561 if (!cti_entry) {
5562 cti_entry = compile_cti(const_cast<Func*>(func), unitpc);
5564 if (pc == unitpc) {
5565 return {cti_code().base() + cti_entry, pc};
5567 return {lookup_cti(func, cti_entry, unitpc, pc), pc};
5570 template <bool breakOnCtlFlow>
5571 TCA dispatchThreaded(bool coverage) {
5572 auto modes = breakOnCtlFlow ? ExecMode::BB : ExecMode::Normal;
5573 if (coverage) {
5574 modes = modes | ExecMode::Coverage;
5576 DEBUGGER_ATTACHED_ONLY(modes = modes | ExecMode::Debugger);
5577 auto target = lookup_cti(vmfp()->func(), vmpc());
5578 CALLEE_SAVED_BARRIER();
5579 auto retAddr = g_enterCti(modes, target, rds::header());
5580 CALLEE_SAVED_BARRIER();
5581 return retAddr;
/*
 * Main interpreter loop. With breakOnCtlFlow == true we interpret a single
 * basic block and return the TCA to resume at in the JIT; with false we run
 * until the VM frame at this nesting level is popped (returning nullptr).
 */
template <bool breakOnCtlFlow>
TCA dispatchImpl() {
  // Per-file coverage and request-wide coverage are mutually exclusive modes.
  auto const checkCoverage = [&] {
    return !RO::EvalEnablePerFileCoverage
      ? RID().getCoverage()
      : vmfp() && vmfp()->unit()->isCoverageEnabled();
  };
  bool collectCoverage = checkCoverage();
  if (cti_enabled()) {
    // Call-threaded interpreter takes over entirely when enabled.
    return dispatchThreaded<breakOnCtlFlow>(collectCoverage);
  }

  // Unfortunately, MSVC doesn't support computed
  // gotos, so use a switch instead.
#ifndef _MSC_VER
  // Three label tables: plain execution, debugger-hooked, and
  // coverage-recording variants of each opcode body below.
  static const void* const optabDirect[] = {
#define O(name, imm, push, pop, flags) \
    &&Label##name,
    OPCODES
#undef O
  };
  static const void* const optabDbg[] = {
#define O(name, imm, push, pop, flags) \
    &&LabelDbg##name,
    OPCODES
#undef O
  };
  static const void* const optabCover[] = {
#define O(name, imm, push, pop, flags) \
    &&LabelCover##name,
    OPCODES
#undef O
  };
  assertx(sizeof(optabDirect) / sizeof(const void *) == Op_count);
  assertx(sizeof(optabDbg) / sizeof(const void *) == Op_count);
  const void* const* optab = optabDirect;
  if (collectCoverage) {
    optab = optabCover;
  }
  DEBUGGER_ATTACHED_ONLY(optab = optabDbg);
#endif

  bool isCtlFlow = false;
  TCA retAddr = nullptr;
  Op op;

#ifdef _MSC_VER
# define DISPATCH_ACTUAL() goto DispatchSwitch
#else
# define DISPATCH_ACTUAL() goto *optab[size_t(op)]
#endif

// Decode the next opcode and jump to its body; in BB mode, stop first if the
// previous instruction was control flow.
#define DISPATCH() do {                                                 \
    if (breakOnCtlFlow && isCtlFlow) {                                  \
      ONTRACE(1,                                                        \
              Trace::trace("dispatch: Halt dispatch(%p)\n",             \
                           vmfp()));                                    \
      return retAddr;                                                   \
    }                                                                   \
    opPC = pc;                                                          \
    op = decode_op(pc);                                                 \
    COND_STACKTRACE("dispatch: ");                                      \
    FTRACE(1, "dispatch: {}: {}\n", pcOff(),                            \
           instrToString(opPC, vmfp()->func()));                        \
    DISPATCH_ACTUAL();                                                  \
  } while (0)

  ONTRACE(1, Trace::trace("dispatch: Enter dispatch(%p)\n",
                          vmfp()));
  PC pc = vmpc();
  PC opPC;
  DISPATCH();

// Debugger hook, run only when the debugger is attached.
#define OPCODE_DBG_BODY(name, imm, push, pop, flags)          \
  phpDebuggerOpcodeHook(opPC)
// Coverage hook, run only when coverage collection is active.
#define OPCODE_COVER_BODY(name, imm, push, pop, flags)        \
  if (collectCoverage) {                                      \
    recordCodeCoverage(opPC);                                 \
  }
// The real opcode body: execute the iop wrapper, then re-dispatch.
#define OPCODE_MAIN_BODY(name, imm, push, pop, flags)         \
  {                                                           \
    if (breakOnCtlFlow && Stats::enableInstrCount()) {        \
      Stats::inc(Stats::Instr_InterpBB##name);                \
    }                                                         \
    retAddr = iopWrap##name<breakOnCtlFlow>(pc);              \
    vmpc() = pc;                                              \
    /* Calls and NativeImpl may have switched units; refresh  \
       the coverage mode and label table. */                  \
    if (isFCallFunc(Op::name) ||                              \
        Op::name == Op::NativeImpl) {                         \
      collectCoverage = checkCoverage();                      \
      optab = !collectCoverage ? optabDirect : optabCover;    \
      DEBUGGER_ATTACHED_ONLY(optab = optabDbg);               \
    }                                                         \
    if (breakOnCtlFlow) {                                     \
      isCtlFlow = instrIsControlFlow(Op::name);               \
    }                                                         \
    if (instrCanHalt(Op::name) && UNLIKELY(!pc)) {            \
      vmfp() = nullptr;                                       \
      vmpc() = nullptr;                                       \
      /* We returned from the top VM frame in this nesting level. This means
       * m_savedRip in our ActRec must have been callToExit, which should've
       * been returned by jitReturnPost(), whether or not we were called from
       * the TC. We only actually return callToExit to our caller if that
       * caller is dispatchBB(). */ \
      assertx(retAddr == jit::tc::ustubs().callToExit);       \
      return breakOnCtlFlow ? retAddr : nullptr;              \
    }                                                         \
    assertx(isCtlFlow || !retAddr);                           \
    DISPATCH();                                               \
  }

#ifdef _MSC_VER
DispatchSwitch:
  switch (uint8_t(op)) {
#define O(name, imm, push, pop, flags)                                        \
    case Op::name: {                                                          \
      DEBUGGER_ATTACHED_ONLY(OPCODE_DBG_BODY(name, imm, push, pop, flags));   \
      OPCODE_COVER_BODY(name, imm, push, pop, flags)                          \
      OPCODE_MAIN_BODY(name, imm, push, pop, flags)                           \
    }
#else
// Fall-through label chain: Dbg -> Cover -> plain body, so each table entry
// runs exactly the hooks its mode requires.
#define O(name, imm, push, pop, flags)                        \
  LabelDbg##name:                                             \
    OPCODE_DBG_BODY(name, imm, push, pop, flags);             \
  LabelCover##name:                                           \
    OPCODE_COVER_BODY(name, imm, push, pop, flags)            \
  Label##name:                                                \
    OPCODE_MAIN_BODY(name, imm, push, pop, flags)
#endif

  OPCODES

#ifdef _MSC_VER
  }
#endif
#undef O
#undef DISPATCH
#undef DISPATCH_ACTUAL
#undef OPCODE_DBG_BODY
#undef OPCODE_COVER_BODY
#undef OPCODE_MAIN_BODY

  // Dispatch only exits via the returns inside the macros above.
  not_reached();
}
// Run the interpreter with no basic-block breaking; returns only when the VM
// frame at this nesting level has been popped.
static void dispatch() {
  WorkloadStats guard(WorkloadStats::InInterp);
  tracing::BlockNoTrace _{"dispatch"};

  // dispatchImpl<false> always runs to halt, so it must return nullptr.
  DEBUG_ONLY auto const retAddr = dispatchImpl<false>();
  assertx(retAddr == nullptr);
}
// Interpret one basic block, returning the TC address to resume execution at.
TCA dispatchBB() {
  // Lazily build the current SrcKey; only needed for the tracing below.
  auto sk = [] {
    return SrcKey(vmfp()->func(), vmpc(), resumeModeFromActRec(vmfp()));
  };

  if (Trace::moduleEnabled(Trace::dispatchBB)) {
    static auto cat = makeStaticString("dispatchBB");
    auto name = makeStaticString(show(sk()));
    Stats::incStatGrouped(cat, name, 1);
  }
  if (Trace::moduleEnabled(Trace::ringbuffer)) {
    Trace::ringbufferEntry(Trace::RBTypeDispatchBB, sk().toAtomicInt(), 0);
  }
  return dispatchImpl<true>();
}
///////////////////////////////////////////////////////////////////////////////
// Call-threaded entry points

namespace {

// Compile-time switch for the BoolProfilers below (off in production).
constexpr auto do_prof = false;
static BoolProfiler PredictProf("predict"), LookupProf("lookup");

// Small thread-local ring buffer of return-address predictions, pushed at
// call-like opcodes and popped at return-like ones (see run<>() below).
constexpr unsigned NumPredictors = 16; // real cpus have 8-24
static __thread unsigned s_predict{0};
static __thread PcPair s_predictors[NumPredictors];
static void pushPrediction(PcPair p) {
  s_predictors[s_predict++ % NumPredictors] = p;
}
static PcPair popPrediction() {
  return s_predictors[--s_predict % NumPredictors];
}

// callsites quick reference:
//
// simple opcodes, including throw
//   call #addr
// conditional branch
//   lea [pc + instrLen(pc)], nextpc_saved
//   call #addr
//   cmp rdx, nextpc_saved
//   jne native-target
// unconditional branch
//   call #addr
//   jmp native-target
// indirect branch
//   call #addr
//   jmp rax
// calls w/ return prediction
//   lea [pc + instrLen(pc)], nextpc_arg
//   call #addr
//   jmp rax
// Slow-path helper run before an opcode whenever a non-Normal exec mode is
// active: debugger hook and/or coverage recording. BB mode needs no per-op
// work here (handled in run<>), hence the empty branch.
NEVER_INLINE void execModeHelper(PC pc, ExecMode modes) {
  if (modes & ExecMode::Debugger) phpDebuggerOpcodeHook(pc);
  if (modes & ExecMode::Coverage) recordCodeCoverage(pc);
  if (modes & ExecMode::BB) {
    //Stats::inc(Stats::Instr_InterpBB##name);
  }
}
/*
 * Execute one opcode in the call-threaded interpreter and decide how the
 * caller (generated cti code) should continue: fall through, jump to a
 * native target, or exit via g_exitCti. See the "callsites quick reference"
 * comment above for the calling conventions.
 */
template<Op opcode, bool repo_auth, class Iop>
PcPair run(TCA* returnaddr, ExecMode modes, rds::Header* tl, PC nextpc, PC pc,
           Iop iop) {
  assert(vmpc() == pc);
  assert(peek_op(pc) == opcode);
  FTRACE(1, "dispatch: {}: {}\n", pcOff(),
         instrToString(pc, vmfp()->func()));
  if (!repo_auth) {
    // Non-repo-auth bodies may need debugger/coverage hooks per opcode.
    if (UNLIKELY(modes != ExecMode::Normal)) {
      execModeHelper(pc, modes);
    }
  }
  DEBUG_ONLY auto origPc = pc;
  pc += encoded_op_size(opcode); // skip the opcode
  auto retAddr = iop(pc);
  vmpc() = pc;
  assert(!isThrow(opcode));
  if (isSimple(opcode)) {
    // caller ignores rax return value, invokes next bytecode
    return {nullptr, pc};
  }
  if (isBranch(opcode) || isUnconditionalJmp(opcode)) {
    // callsites have no ability to indirect-jump out of bytecode.
    // so smash the return address to &g_exitCti
    // if we need to exit because of dispatchBB() mode.
    // TODO: t6019406 use surprise checks to eliminate BB mode
    if (modes & ExecMode::BB) {
      *returnaddr = g_exitCti;
      return {nullptr, (PC)retAddr}; // exit stub will return retAddr
    }
    return {nullptr, pc};
  }

  // call & indirect branch: caller will jump to address returned in rax
  if (instrCanHalt(opcode) && !pc) {
    vmfp() = nullptr;
    vmpc() = nullptr;
    // We returned from the top VM frame in this nesting level. This means
    // m_savedRip in our ActRec must have been callToExit, which should've
    // been returned by jitReturnPost(), whether or not we were called from
    // the TC. We only actually return callToExit to our caller if that
    // caller is dispatchBB().
    assert(retAddr == jit::tc::ustubs().callToExit);
    if (!(modes & ExecMode::BB)) retAddr = nullptr;
    return {g_exitCti, (PC)retAddr};
  }
  if (instrIsControlFlow(opcode) && (modes & ExecMode::BB)) {
    // Basic-block mode: stop at control flow and hand retAddr back.
    return {g_exitCti, (PC)retAddr};
  }
  if (isReturnish(opcode)) {
    // Try the pushed return prediction; on a hit, jump straight there.
    auto target = popPrediction();
    if (do_prof) PredictProf(pc == target.pc);
    if (pc == target.pc) return target;
  }
  if (isFCall(opcode)) {
    // call-like opcodes predict return to next bytecode
    assert(nextpc == origPc + instrLen(origPc));
    pushPrediction({*returnaddr + kCtiIndirectJmpSize, nextpc});
  }
  if (do_prof) LookupProf(pc == vmfp()->func()->entry());
  // return ip to jump to, caller will do jmp(rax)
  return lookup_cti(vmfp()->func(), pc);
}
// register assignments inbetween calls to cti opcodes
// rax = target of indirect branch instr (call, switch, etc)
// rdx = pc (passed as 3rd arg register, 2nd return register)
// rbx = next-pc after branch instruction, only if isBranch(op)
// r12 = rds::Header* (vmtl)
// r13 = modes
// r14 = location of return address to cti caller on native stack

// Bind the fixed registers (table above) to named locals. Clang lacks GCC's
// explicit register variables, so it uses inline-asm moves instead.
#ifdef __clang__
#define DECLARE_FIXED(TL,MODES,RA)\
  rds::Header* TL; asm volatile("mov %%r12, %0" : "=r"(TL) ::);\
  ExecMode MODES; asm volatile("mov %%r13d, %0" : "=r"(MODES) ::);\
  TCA* RA; asm volatile("mov %%r14, %0" : "=r"(RA) ::);
#else
#define DECLARE_FIXED(TL,MODES,RA)\
  register rds::Header* TL asm("r12");\
  register ExecMode MODES asm("r13");\
  register TCA* RA asm("r14");
#endif
namespace cti {

// generate cti::op call-threaded function for each opcode
// (repo-auth variant: no per-opcode debugger/coverage hooks).
#define O(opcode, imm, push, pop, flags)\
PcPair opcode(PC nextpc, TCA*, PC pc) {\
  DECLARE_FIXED(tl, modes, returnaddr);\
  return run<Op::opcode,true>(returnaddr, modes, tl, nextpc, pc,\
      [](PC& pc) {\
        return iopWrap##opcode<false>(pc);\
      });\
}
OPCODES
#undef O

// generate debug/coverage-capable opcode bodies (for non-repo-auth)
#define O(opcode, imm, push, pop, flags)\
PcPair d##opcode(PC nextpc, TCA*, PC pc) {\
  DECLARE_FIXED(tl, modes, returnaddr);\
  return run<Op::opcode,false>(returnaddr, modes, tl, nextpc, pc,\
      [](PC& pc) {\
        return iopWrap##opcode<false>(pc);\
      });\
}
OPCODES
#undef O
}

// generate table of opcode handler addresses, used by call-threaded emitter
const CodeAddress cti_ops[] = {
  #define O(opcode, imm, push, pop, flags) (CodeAddress)&cti::opcode,
  OPCODES
  #undef O
};
// Same table for the debug/coverage-capable bodies.
const CodeAddress ctid_ops[] = {
  #define O(opcode, imm, push, pop, flags) (CodeAddress)&cti::d##opcode,
  OPCODES
  #undef O
};