Reland D23318594 and D23318592 add recordbasenativesp instr
hphp/runtime/vm/native.cpp
/*
   +----------------------------------------------------------------------+
   | HipHop for PHP                                                       |
   +----------------------------------------------------------------------+
   | Copyright (c) 2010-present Facebook, Inc. (http://www.facebook.com)  |
   +----------------------------------------------------------------------+
   | This source file is subject to version 3.01 of the PHP license,      |
   | that is bundled with this package in the file LICENSE, and is        |
   | available through the world-wide-web at the following url:           |
   | http://www.php.net/license/3_01.txt                                  |
   | If you did not receive a copy of the PHP license and are unable to   |
   | obtain it through the world-wide-web, please send a note to          |
   | license@php.net so we can mail you a copy immediately.               |
   +----------------------------------------------------------------------+
*/

#include "hphp/runtime/vm/native.h"

#include "hphp/runtime/base/req-ptr.h"
#include "hphp/runtime/base/tv-type.h"
#include "hphp/runtime/base/type-variant.h"
#include "hphp/runtime/vm/func-emitter.h"
#include "hphp/runtime/vm/native-func-table.h"
#include "hphp/runtime/vm/runtime.h"
#include "hphp/runtime/vm/unit.h"

namespace HPHP { namespace Native {

//////////////////////////////////////////////////////////////////////////////

FuncTable s_systemNativeFuncs;
const FuncTable s_noNativeFuncs; // always empty
ConstantMap s_constant_map;
ClassConstantMapMap s_class_constant_map;

/////////////////////////////////////////////////////////////////////////////

namespace {

#ifdef __aarch64__
constexpr size_t kNumGPRegs = 8;
#elif defined(__powerpc64__)
constexpr size_t kNumGPRegs = 31;
#else
// amd64 calling convention (also used by x64): rdi, rsi, rdx, rcx, r8, r9
constexpr size_t kNumGPRegs = 6;
#endif

// Note: This number should generally not be modified
// as it depends on the CPU's ABI.
// If an update is needed, however, update and run
// make_native-func-caller.php as well
constexpr size_t kNumSIMDRegs = 8;

#include "hphp/runtime/vm/native-func-caller.h"

struct Registers {
  // The spilled arguments come right after the GP regs so that we can treat
  // them as a single array of kMaxBuiltinArgs ints after populating them.
  int64_t GP_regs[kNumGPRegs];
  int64_t spilled_args[kMaxBuiltinArgs - kNumGPRegs];
  double SIMD_regs[kNumSIMDRegs];
  TypedValue spilled_rvals[kMaxBuiltinArgs];

  int GP_count{0};
  int SIMD_count{0};
  int spilled_count{0};
  int spilled_rval_count{0};
};
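
// For illustration only: with the x86-64 values above (6 GP regs, 8 SIMD
// regs), a hypothetical builtin taking (int $a, float $b, string $c) and no
// context pointer would be packed by the push* helpers below roughly as
//   GP_regs[0] = $a,  GP_regs[1] = pointer to $c's value slot,
//   SIMD_regs[0] = $b,
// and only arguments beyond the available registers land in spilled_args,
// which native-func-caller.h then copies onto the C stack.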

// Push an int argument, spilling to the stack if necessary.
void pushInt(Registers& regs, const int64_t value) {
  if (regs.GP_count < kNumGPRegs) {
    regs.GP_regs[regs.GP_count++] = value;
  } else {
    assertx(regs.spilled_count < kMaxBuiltinArgs - kNumGPRegs);
    regs.spilled_args[regs.spilled_count++] = value;
  }
}

void pushRval(Registers& regs, tv_rval tv, bool isFCallBuiltin) {
  if (!wide_tv_val || isFCallBuiltin) {
    // tv_rval either points at the stack, or we don't have wide
    // tv_vals. In either case, it's actually pointing at a TypedValue
    // already.
    static_assert(TVOFF(m_data) == 0, "");
    assertx((const char*)&val(tv) + TVOFF(m_type) == (const char*)&type(tv));
    return pushInt(regs, (int64_t)&val(tv));
  }

  // Otherwise we need to materialize the TypedValue and push a
  // pointer to it.
  assertx(regs.spilled_rval_count < kMaxBuiltinArgs);
  regs.spilled_rvals[regs.spilled_rval_count++] = *tv;
  pushInt(regs, (int64_t)&regs.spilled_rvals[regs.spilled_rval_count - 1]);
}

// Push a double argument, spilling to the stack if necessary. We take the
// input as a Value in order to type-pun it as an int when we spill.
void pushDouble(Registers& regs, const Value value) {
  if (regs.SIMD_count < kNumSIMDRegs) {
    regs.SIMD_regs[regs.SIMD_count++] = value.dbl;
#if defined(__powerpc64__)
    // Following ABI, we must increment the GP reg index for each double arg.
    if (regs.GP_count < kNumGPRegs) regs.GP_regs[regs.GP_count++] = 0;
#endif
  } else {
    assertx(regs.spilled_count < kMaxBuiltinArgs - kNumGPRegs);
    regs.spilled_args[regs.spilled_count++] = value.num;
  }
}

// Push a TypedValue argument, spilling to the stack if necessary. We need
// two free GP registers to avoid spilling here. The details of what happens
// when we spill with one free GP register change between architectures.
void pushTypedValue(Registers& regs, TypedValue tv) {
  auto const dataType = static_cast<data_type_t>(type(tv));
  if (regs.GP_count + 1 < kNumGPRegs) {
    regs.GP_regs[regs.GP_count++] = val(tv).num;
    regs.GP_regs[regs.GP_count++] = dataType;
  } else {
#if defined(__powerpc64__)
    // We don't have room to spill two 64-bit values on PowerPC. If it becomes
    // an issue, we can always up kMaxBuiltinArgs later. (We have room to pass
    // 15 TypedValue arguments on PowerPC already, which should be plenty.)
    always_assert(false);
#else
    assertx(regs.spilled_count + 1 < kMaxBuiltinArgs - kNumGPRegs);
    regs.spilled_args[regs.spilled_count++] = val(tv).num;
    regs.spilled_args[regs.spilled_count++] = dataType;
    // On x86, if we have one free GP register left, we'll use it for the next
    // int argument, but on ARM, we'll just spill all later int arguments.
#ifdef __aarch64__
    regs.GP_count = kNumGPRegs;
#endif
#endif
  }
}
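
// For illustration only: pushing a TypedValue whose payload is the int 42
// occupies two consecutive GP slots, first the payload (42) and then the
// data_type_t tag for KindOfInt64; that is how a builtin declared to take a
// TypedValue by value receives both halves of the value.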

// Push a native argument (e.g. an ArrayData / TypedValue, not Array / Variant).
void pushNativeArg(Registers& regs, const Func* const func, const int i,
                   MaybeDataType builtinType, TypedValue arg) {
  // If the param type is known, just pass the value.
  if (builtinType) return pushInt(regs, val(arg).num);

  // Pass both the type and value for TypedValue parameters.
  pushTypedValue(regs, arg);
}

// Push each argument, spilling ones we don't have registers for to the stack.
void populateArgs(Registers& regs,
                  const ActRec* fp,
                  const Func* const func,
                  TypedValue* stk,
                  const int numArgs,
                  bool isFCallBuiltin) {
  // Regular FCalls will have their out parameter locations below the ActRec on
  // the stack, while FCallBuiltin has no ActRec to skip over.
  auto io = isFCallBuiltin
    ? stk + 1
    : reinterpret_cast<TypedValue*>(const_cast<ActRec*>(fp) + 1);

  auto const get = [&] (int idx) {
    return isFCallBuiltin
      ? tv_lval{&stk[-idx]}
      : frame_local(fp, idx);
  };

  for (auto i = 0; i < numArgs; ++i) {
    auto const arg = get(i);
    auto const& pi = func->params()[i];
    auto const type = pi.builtinType;
    if (func->isInOut(i)) {
      if (auto const iv = builtinInValue(func, i)) {
        *io = *iv;
        tvDecRefGen(arg);
      } else {
        *io = *arg;
      }

      // Any persistent values may become counted...
      if (isArrayLikeType(io->m_type)) {
        io->m_type = io->m_data.parr->toDataType();
      } else if (isStringType(io->m_type)) {
        io->m_type = KindOfString;
      }

      // Set the input value to null to avoid double freeing it.
      arg.type() = KindOfNull;

      pushInt(regs, (int64_t)io++);
    } else if (pi.isTakenAsTypedValue()) {
      pushTypedValue(regs, *arg);
    } else if (pi.isNativeArg()) {
      pushNativeArg(regs, func, i, type, *arg);
    } else if (pi.isTakenAsVariant() || !type) {
      pushRval(regs, arg, isFCallBuiltin);
    } else if (type == KindOfDouble) {
      pushDouble(regs, val(arg));
    } else if (isBuiltinByRef(type)) {
      pushInt(regs, (int64_t)&val(arg));
    } else {
      pushInt(regs, val(arg).num);
    }
  }
}
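
// For illustration only: for an inout parameter the loop above does not pass
// the local itself. It copies the input (or the __OutOnly replacement value)
// into the dedicated out slot `io`, hands the builtin a pointer to that slot,
// and nulls out the original argument so it is not freed twice when the
// frame's locals are cleaned up.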

} // namespace

/////////////////////////////////////////////////////////////////////////////

void callFunc(const Func* const func,
              const ActRec* fp,
              const void* const ctx,
              TypedValue* args,
              TypedValue& ret,
              bool isFCallBuiltin) {
  auto const f = func->nativeFuncPtr();
  auto const numArgs = func->numParams();
  auto retType = func->hniReturnType();
  auto regs = Registers{};

  if (ctx) pushInt(regs, (int64_t)ctx);
  populateArgs(regs, fp, func, args, numArgs, isFCallBuiltin);

  // Decide how many int and double arguments we need to call func. Note that
  // spilled arguments come after the GP registers, in line with them. We can
  // spill to the stack without exhausting the GP registers, in two ways:
  //
  // 1. If we exceed the number of SIMD arguments and spill doubles.
  //
  // 2. If we fill all but 1 GP register and then need to pass a TypedValue
  //    (two registers in size) by value.
  //
  // In these cases, we'll pass garbage in the unused GP registers to force
  // everything in the regs.spilled_args array to go on the stack.
  auto const spilled = regs.spilled_count;
  auto const GP_args = &regs.GP_regs[0];
  auto const GP_count = spilled > 0 ? spilled + kNumGPRegs : regs.GP_count;
  auto const SIMD_args = &regs.SIMD_regs[0];
  auto const SIMD_count = regs.SIMD_count;
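
  // For illustration only: with kNumGPRegs == 6, a call that placed 5 ints in
  // GP_regs and then spilled 2 more values sets GP_count to 2 + 6 = 8 here.
  // native-func-caller.h consumes the first 6 entries as registers (the 6th
  // holds unused garbage) and copies the remainder, i.e. regs.spilled_args,
  // onto the stack, preserving the contiguous layout described in Registers.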

  if (!retType) {
    // A folly::none return signifies Variant.
    if (func->isReturnByValue()) {
      ret = callFuncTVImpl(f, GP_args, GP_count, SIMD_args, SIMD_count);
    } else {
      new (&ret) Variant(callFuncIndirectImpl<Variant>(f, GP_args, GP_count,
                                                       SIMD_args, SIMD_count));
      if (ret.m_type == KindOfUninit) {
        ret.m_type = KindOfNull;
      }
    }

    return;
  }

  ret.m_type = *retType;

  switch (*retType) {
    case KindOfNull:
    case KindOfBoolean:
      ret.m_data.num =
        callFuncInt64Impl(f, GP_args, GP_count, SIMD_args, SIMD_count) & 1;
      return;

    case KindOfFunc:
    case KindOfClass:
    case KindOfLazyClass:
    case KindOfInt64:
      ret.m_data.num =
        callFuncInt64Impl(f, GP_args, GP_count, SIMD_args, SIMD_count);
      return;

    case KindOfDouble:
      ret.m_data.dbl =
        callFuncDoubleImpl(f, GP_args, GP_count, SIMD_args, SIMD_count);
      return;

    case KindOfPersistentString:
    case KindOfString:
    case KindOfPersistentVec:
    case KindOfVec:
    case KindOfPersistentDict:
    case KindOfDict:
    case KindOfPersistentKeyset:
    case KindOfKeyset:
    case KindOfPersistentDArray:
    case KindOfDArray:
    case KindOfPersistentVArray:
    case KindOfVArray:
    case KindOfClsMeth:
    case KindOfObject:
    case KindOfResource:
    case KindOfRecord: {
      assertx(isBuiltinByRef(ret.m_type));
      if (func->isReturnByValue()) {
        auto val = callFuncInt64Impl(f, GP_args, GP_count, SIMD_args,
                                     SIMD_count);
        ret.m_data.num = val;
      } else {
        using T = req::ptr<StringData>;
        new (&ret.m_data) T(callFuncIndirectImpl<T>(f, GP_args, GP_count,
                                                    SIMD_args, SIMD_count));
      }
      if (ret.m_data.num == 0) {
        ret.m_type = KindOfNull;
      }
      return;
    }

    case KindOfRFunc:
    case KindOfRClsMeth:
    case KindOfUninit:
      break;
  }

  not_reached();
}

//////////////////////////////////////////////////////////////////////////////

namespace {

template <typename F>
void coerceFCallArgsImpl(int32_t numArgs, const Func* func, F args) {
  assertx(func->isBuiltin() && "func is not a builtin");
  assertx(numArgs == func->numParams());

  for (int32_t i = 0; i < numArgs; i++) {
    const Func::ParamInfo& pi = func->params()[i];

    auto const tv = args(i);

    auto tc = pi.typeConstraint;
    auto targetType = pi.builtinType;
    if (tc.isNullable()) {
      if (tvIsNull(tv)) {
        // No need to coerce when passed a null for a nullable type.
        continue;
      }
      // Arg isn't null, so treat it like the underlying type for coercion
      // purposes. The ABI-passed type will still be mixed/Variant.
      targetType = tc.underlyingDataType();
    }
    if (!targetType) {
      targetType = tc.underlyingDataType();
    }

    auto const raise_type_error = [&]{
      auto const expected_type = [&]{
        if (tc.isVArrayOrDArray()) return "varray_or_darray";
        return getDataTypeString(*targetType).data();
      }();
      auto const msg = param_type_error_message(
        func->name()->data(), i+1, expected_type, *tv);
      if (RuntimeOption::PHP7_EngineExceptions) {
        SystemLib::throwTypeErrorObject(msg);
      }
      SystemLib::throwRuntimeExceptionObject(msg);
    };

    // Check the varray_or_darray and vec_or_dict union types.
    // Precondition: the DataType of the TypedValue is correct.
    //
    // TODO(arnabde,kshaunak): Also support vec_or_dict here.
    auto const check_dvarray = [&]{
      assertx(IMPLIES(targetType, equivDataTypes(type(tv), *targetType)));
      if (tc.isVArrayOrDArray() && !tvIsHAMSafeDVArray(tv)) {
        raise_type_error();
      }
    };

    // Check if we have the right type, or if it's a Variant.
    if (!targetType || equivDataTypes(type(tv), *targetType)) {
      check_dvarray();
      continue;
    }

    if (tvIsClass(tv) && isStringType(*targetType)) {
      val(tv).pstr = const_cast<StringData*>(val(tv).pclass->name());
      type(tv) = KindOfPersistentString;
      if (RuntimeOption::EvalClassStringHintNotices) {
        raise_notice(Strings::CLASS_TO_STRING_IMPLICIT);
      }
      continue;
    }

    if (tvIsClsMeth(tv) && tc.convertClsMethToArrLike()) {
      if (RuntimeOption::EvalVecHintNotices) {
        raise_clsmeth_compat_type_hint(func, tc.displayName(func->cls()), i);
      }
      if (RO::EvalHackArrDVArrs) {
        tvCastToVecInPlace(tv);
      } else {
        tvCastToVArrayInPlace(tv);
        check_dvarray();
      }
      continue;
    }

    raise_type_error();
  }
}

} // namespace
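
// For illustration only: given a builtin parameter typed `string`,
// coerceFCallArgsImpl accepts a string value as-is, rewrites a Class to its
// name (with a notice when EvalClassStringHintNotices is set), and for any
// other value throws a TypeError (or a RuntimeException when
// PHP7_EngineExceptions is disabled) via raise_type_error.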

void coerceFCallArgsFromLocals(const ActRec* fp,
                               int32_t numArgs,
                               const Func* func) {
  coerceFCallArgsImpl(
    numArgs, func,
    [&] (int32_t idx) { return frame_local(fp, idx); }
  );
}

void coerceFCallArgsFromStack(TypedValue* args,
                              int32_t numArgs,
                              const Func* func) {
  coerceFCallArgsImpl(
    numArgs, func,
    [&] (int32_t idx) { return &args[-idx]; }
  );
}

#undef CASE
#undef COERCE_OR_CAST

TypedValue* functionWrapper(ActRec* ar) {
  assertx(ar);
  auto func = ar->func();
  auto numArgs = func->numParams();
  TypedValue* args = ((TypedValue*)ar) - 1;

  coerceFCallArgsFromLocals(ar, numArgs, func);

  TypedValue rv;
  rv.m_type = KindOfUninit;
  callFunc(func, ar, nullptr, args, rv, false);

  assertx(rv.m_type != KindOfUninit);
  frame_free_locals_no_this_inl(ar, func->numLocals(), &rv);
  tvCopy(rv, *ar->retSlot());
  ar->retSlot()->m_aux.u_asyncEagerReturnFlag = 0;
  return ar->retSlot();
}

TypedValue* methodWrapper(ActRec* ar) {
  assertx(ar);
  auto func = ar->func();
  auto numArgs = func->numParams();
  bool isStatic = func->isStatic();
  TypedValue* args = ((TypedValue*)ar) - 1;

  coerceFCallArgsFromLocals(ar, numArgs, func);

  // Prepend a context arg for methods:
  //   Class* when the method is called statically: Foo::bar()
  //   ObjectData* when it is called on an instance: $foo->bar()
  void* ctx;  // ObjectData* or Class*
  if (ar->hasThis()) {
    if (isStatic) {
      throw_instance_method_fatal(func->fullName()->data());
    }
    ctx = ar->getThis();
  } else {
    if (!isStatic) {
      throw_instance_method_fatal(func->fullName()->data());
    }
    ctx = ar->getClass();
  }

  TypedValue rv;
  rv.m_type = KindOfUninit;
  callFunc(func, ar, ctx, args, rv, false);

  assertx(rv.m_type != KindOfUninit);
  if (isStatic) {
    frame_free_locals_no_this_inl(ar, func->numLocals(), &rv);
  } else {
    frame_free_locals_inl(ar, func->numLocals(), &rv);
  }
  tvCopy(rv, *ar->retSlot());
  ar->retSlot()->m_aux.u_asyncEagerReturnFlag = 0;
  return ar->retSlot();
}

[[noreturn]] TypedValue* unimplementedWrapper(ActRec* ar) {
  auto func = ar->func();
  auto cls = func->cls();
  if (cls) {
    raise_error("Call to unimplemented native method %s::%s()",
                cls->name()->data(), func->name()->data());
  }
  raise_error("Call to unimplemented native function %s()",
              func->name()->data());
}

void getFunctionPointers(const NativeFunctionInfo& info, int nativeAttrs,
                         ArFunction& bif, NativeFunction& nif) {
  nif = info.ptr;
  if (!nif) {
    bif = unimplementedWrapper;
    return;
  }

  auto const isMethod = info.sig.args.size() &&
      ((info.sig.args[0] == NativeSig::Type::This) ||
       (info.sig.args[0] == NativeSig::Type::Class));
  bif = isMethod ? methodWrapper : functionWrapper;
}

//////////////////////////////////////////////////////////////////////////////

const StaticString s_outOnly("__OutOnly");

static MaybeDataType typeForOutParam(TypedValue attr) {
  if (!isArrayLikeType(attr.m_type) || attr.m_data.parr->size() < 1) {
    return {};
  }

  auto const& type = attr.m_data.parr->nvGetVal(attr.m_data.parr->iter_begin());
  if (!isStringType(type.m_type)) return {};

  auto const str = type.m_data.pstr->data();
  if (strcmp(str, "varray") == 0) {
    return RuntimeOption::EvalHackArrDVArrs ? KindOfVec : KindOfVArray;
  }
  if (strcmp(str, "darray") == 0) {
    return RuntimeOption::EvalHackArrDVArrs ? KindOfDict : KindOfDArray;
  }

#define DT(name, ...) if (strcmp(str, "KindOf" #name) == 0) return KindOf##name;
  DATATYPES
#undef DT

  return {};
}
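
// For illustration only (hypothetical HNI declaration): an inout parameter
// annotated as
//   <<__Native>> function foo(<<__OutOnly("KindOfInt64")>> inout int $x): void;
// makes typeForOutParam() return KindOfInt64, so builtinInValue() below seeds
// the out slot with int 0 instead of copying the caller's input value.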

MaybeDataType builtinOutType(
  const TypeConstraint& tc,
  const UserAttributeMap& map
) {
  auto const tcDT = tc.underlyingDataType();

  auto const it = map.find(s_outOnly.get());
  if (it == map.end()) return tcDT;

  auto const dt = typeForOutParam(it->second);
  return dt ? dt : tcDT;
}

static folly::Optional<TypedValue> builtinInValue(
  const Func::ParamInfo& pinfo
) {
  auto& map = pinfo.userAttributes;

  auto const it = map.find(s_outOnly.get());
  if (it == map.end()) return {};

  auto const dt = typeForOutParam(it->second);
  if (!dt) return make_tv<KindOfNull>();

  switch (*dt) {
    case KindOfNull:    return make_tv<KindOfNull>();
    case KindOfBoolean: return make_tv<KindOfBoolean>(false);
    case KindOfInt64:   return make_tv<KindOfInt64>(0);
    case KindOfDouble:  return make_tv<KindOfDouble>(0.0);
    case KindOfPersistentString:
    case KindOfString:  return make_tv<KindOfString>(staticEmptyString());
    case KindOfPersistentVec:
    case KindOfVec:     return make_tv<KindOfVec>(ArrayData::CreateVec());
    case KindOfPersistentDict:
    case KindOfDict:    return make_tv<KindOfDict>(ArrayData::CreateDict());
    case KindOfPersistentKeyset:
    case KindOfKeyset:  return make_tv<KindOfNull>();
    case KindOfPersistentDArray:
    case KindOfDArray:  return make_array_like_tv(ArrayData::CreateDArray());
    case KindOfPersistentVArray:
    case KindOfVArray:  return make_array_like_tv(ArrayData::CreateVArray());
    case KindOfUninit:
    case KindOfObject:
    case KindOfResource:
    case KindOfRFunc:
    case KindOfFunc:
    case KindOfClass:
    case KindOfLazyClass:
    case KindOfClsMeth:
    case KindOfRClsMeth:
    case KindOfRecord:  return make_tv<KindOfNull>();
  }
  not_reached();
}

folly::Optional<TypedValue> builtinInValue(const Func* builtin, uint32_t i) {
  return builtinInValue(builtin->params()[i]);
}

//////////////////////////////////////////////////////////////////////////////

static bool tcCheckNative(const TypeConstraint& tc, const NativeSig::Type ty) {
  using T = NativeSig::Type;

  if (!tc.hasConstraint() || tc.isNullable() || tc.isCallable() ||
      tc.isArrayKey() || tc.isNumber() || tc.isVecOrDict() ||
      tc.isVArrayOrDArray() || tc.isArrayLike()) {
    return ty == T::Mixed || ty == T::MixedTV;
  }

  if (!tc.underlyingDataType()) {
    return false;
  }

  switch (*tc.underlyingDataType()) {
    case KindOfDouble:       return ty == T::Double;
    case KindOfBoolean:      return ty == T::Bool;
    case KindOfObject:       return ty == T::Object   || ty == T::ObjectArg;
    case KindOfPersistentString:
    case KindOfString:       return ty == T::String   || ty == T::StringArg;
    case KindOfPersistentVec:
    case KindOfVec:
    case KindOfPersistentDict:
    case KindOfDict:
    case KindOfPersistentKeyset:
    case KindOfKeyset:
    case KindOfPersistentDArray:
    case KindOfDArray:
    case KindOfPersistentVArray:
    case KindOfVArray:       return ty == T::Array    || ty == T::ArrayArg;
    case KindOfResource:     return ty == T::Resource || ty == T::ResourceArg;
    case KindOfUninit:
    case KindOfNull:         return ty == T::Void;
    case KindOfInt64:        return ty == T::Int64    || ty == T::Int32;
    case KindOfRFunc:        return false; // TODO(T66903859)
    case KindOfFunc:         return ty == T::Func;
    case KindOfClass:        return ty == T::Class;
    case KindOfClsMeth:      return ty == T::ClsMeth;
    case KindOfRClsMeth:     // TODO(T67037453)
    case KindOfLazyClass:    // TODO(T68823958)
    case KindOfRecord:       return false; // TODO(T41031632)
  }
  not_reached();
}

static bool tcCheckNativeIO(
  const Func::ParamInfo& pinfo, const NativeSig::Type ty
) {
  using T = NativeSig::Type;

  auto const checkDT = [&] (DataType dt) -> bool {
    switch (dt) {
      case KindOfDouble:       return ty == T::DoubleIO;
      case KindOfBoolean:      return ty == T::BoolIO;
      case KindOfObject:       return ty == T::ObjectIO;
      case KindOfPersistentString:
      case KindOfString:       return ty == T::StringIO;
      case KindOfPersistentVec:
      case KindOfVec:          return ty == T::ArrayIO;
      case KindOfPersistentDict:
      case KindOfDict:         return ty == T::ArrayIO;
      case KindOfPersistentKeyset:
      case KindOfKeyset:       return ty == T::ArrayIO;
      case KindOfPersistentDArray:
      case KindOfDArray:       return ty == T::ArrayIO;
      case KindOfPersistentVArray:
      case KindOfVArray:       return ty == T::ArrayIO;
      case KindOfResource:     return ty == T::ResourceIO;
      case KindOfUninit:
      case KindOfNull:         return false;
      case KindOfInt64:        return ty == T::IntIO;
      case KindOfRFunc:        return false; // TODO(T66903859)
      case KindOfFunc:         return ty == T::FuncIO;
      case KindOfClass:        return ty == T::ClassIO;
      case KindOfClsMeth:      return ty == T::ClsMethIO;
      case KindOfRClsMeth:     // TODO(T67037453)
      case KindOfLazyClass:    // TODO(T68823958)
      case KindOfRecord:       return false; // TODO(T41031632)
    }
    not_reached();
  };

  auto const tv = builtinInValue(pinfo);
  if (tv) {
    if (isNullType(tv->m_type)) return ty == T::MixedIO;
    return checkDT(tv->m_type);
  }

  auto const& tc = pinfo.typeConstraint;
  if (!tc.hasConstraint() || tc.isNullable() || tc.isCallable() ||
      tc.isArrayKey() || tc.isNumber() || tc.isVecOrDict() ||
      tc.isVArrayOrDArray() || tc.isArrayLike()) {
    return ty == T::MixedIO;
  }

  if (!tc.underlyingDataType()) {
    return false;
  }

  return checkDT(*tc.underlyingDataType());
}

const char* kInvalidReturnTypeMessage = "Invalid return type detected";
const char* kInvalidArgTypeMessage = "Invalid argument type detected";
const char* kInvalidArgCountMessage = "Invalid argument count detected";
const char* kInvalidNumArgsMessage =
  "\"NumArgs\" builtins must take an int64_t as their first declared argument";
const char* kNeedStaticContextMessage =
  "Static class functions must take a Class* as their first argument";
const char* kNeedObjectContextMessage =
  "Instance methods must take an ObjectData* as their first argument";

static const StaticString
  s_native("__Native"),
  s_actrec("ActRec");

const char* checkTypeFunc(const NativeSig& sig,
                          const TypeConstraint& retType,
                          const FuncEmitter* func) {
  using T = NativeSig::Type;

  if (!tcCheckNative(retType, sig.ret)) return kInvalidReturnTypeMessage;

  auto argIt = sig.args.begin();
  auto endIt = sig.args.end();
  if (func->pce()) { // called from the verifier so m_cls is not set yet
    if (argIt == endIt) return kInvalidArgCountMessage;
    auto const ctxTy = *argIt++;
    if (func->attrs & HPHP::AttrStatic) {
      if (ctxTy != T::Class) return kNeedStaticContextMessage;
    } else {
      if (ctxTy != T::This) return kNeedObjectContextMessage;
    }
  }

  for (auto const& pInfo : func->params) {
    if (argIt == endIt) return kInvalidArgCountMessage;

    auto const argTy = *argIt++;

    if (pInfo.isVariadic()) {
      if (argTy != T::Array) return kInvalidArgTypeMessage;
      continue;
    }

    if (pInfo.isInOut()) {
      if (!tcCheckNativeIO(pInfo, argTy)) {
        return kInvalidArgTypeMessage;
      }
      continue;
    }

    if (!tcCheckNative(pInfo.typeConstraint, argTy)) {
      return kInvalidArgTypeMessage;
    }
  }

  return argIt == endIt ? nullptr : kInvalidArgCountMessage;
}
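
// For illustration only (hypothetical example): for a native instance method
// declared as `<<__Native>> public function f(int $x): string;`, checkTypeFunc
// expects the C++ signature to begin with an ObjectData* `this` context
// argument, followed by an int64_t for $x, and to return a String. A missing
// context argument, a mismatched parameter type, or leftover native arguments
// produce the kNeed*/kInvalid* messages above.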

String fullName(const StringData* fname, const StringData* cname,
                bool isStatic) {
  return {
    cname == nullptr ? String{const_cast<StringData*>(fname)} :
    (String{const_cast<StringData*>(cname)} +
     (isStatic ? "::" : "->") +
     String{const_cast<StringData*>(fname)})
  };
}
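
// For illustration only: fullName("bar", "Foo", true) yields "Foo::bar",
// fullName("bar", "Foo", false) yields "Foo->bar", and a null class name
// yields just "bar"; these strings are the keys used to look builtins up in
// the FuncTable below.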

NativeFunctionInfo getNativeFunction(const FuncTable& nativeFuncs,
                                     const StringData* fname,
                                     const StringData* cname,
                                     bool isStatic) {
  auto const name = fullName(fname, cname, isStatic);
  if (auto info = nativeFuncs.get(name.get())) {
    return info;
  }
  return NativeFunctionInfo();
}

NativeFunctionInfo getNativeFunction(const FuncTable& nativeFuncs,
                                     const char* fname,
                                     const char* cname,
                                     bool isStatic) {
  return getNativeFunction(nativeFuncs,
                           makeStaticString(fname),
                           cname ? makeStaticString(cname) : nullptr,
                           isStatic);
}

void registerNativeFunc(Native::FuncTable& nativeFuncs,
                        const StringData* name,
                        const NativeFunctionInfo& info) {
  nativeFuncs.insert(name, info);
}

void FuncTable::insert(const StringData* name,
                       const NativeFunctionInfo& info) {
  assertx(name->isStatic());
  DEBUG_ONLY auto it = m_infos.insert(std::make_pair(name, info));
  assertx(it.second || it.first->second == info);
}

NativeFunctionInfo FuncTable::get(const StringData* name) const {
  auto const it = m_infos.find(name);
  if (it != m_infos.end()) return it->second;
  return NativeFunctionInfo();
}

void FuncTable::dump() const {
  for (auto e : m_infos) {
    fprintf(stderr, "%s\n", e.first->data());
  }
}

static std::string nativeTypeString(NativeSig::Type ty) {
  using T = NativeSig::Type;
  switch (ty) {
  case T::Int32:
  case T::Int64:      return "int";
  case T::Double:     return "double";
  case T::Bool:       return "bool";
  case T::Object:     return "object";
  case T::String:     return "string";
  case T::Array:      return "array";
  case T::Resource:   return "resource";
  case T::ObjectArg:  return "object";
  case T::StringArg:  return "string";
  case T::ArrayArg:   return "array";
  case T::ResourceArg: return "resource";
  case T::Mixed:      return "mixed";
  case T::MixedTV:    return "mixed";
  case T::This:       return "this";
  case T::Class:      return "class";
  case T::Void:       return "void";
  case T::Func:       return "func";
  case T::ClsMeth:    return "clsmeth";
  case T::IntIO:      return "inout int";
  case T::DoubleIO:   return "inout double";
  case T::BoolIO:     return "inout bool";
  case T::ObjectIO:   return "inout object";
  case T::StringIO:   return "inout string";
  case T::ArrayIO:    return "inout array";
  case T::ResourceIO: return "inout resource";
  case T::FuncIO:     return "inout func";
  case T::ClassIO:    return "inout class";
  case T::ClsMethIO:  return "inout clsmeth";
  case T::MixedIO:    return "inout mixed";
  }
  not_reached();
}

std::string NativeSig::toString(const char* classname,
                                const char* fname) const {
  using T = NativeSig::Type;

  auto str = folly::to<std::string>(nativeTypeString(ret), " ");
  auto argIt = args.begin();
  auto endIt = args.end();

  if (argIt != endIt) {
    if (classname) str += classname;
    if (*argIt == T::This) {
      str += "->";
      ++argIt;
    } else if (*argIt == T::Class) {
      str += "::";
      ++argIt;
    }
  }
  str += folly::to<std::string>(fname,
                                "(",
                                argIt != endIt ? nativeTypeString(*argIt++)
                                               : "void");

  for (; argIt != endIt; ++argIt) {
    str += folly::to<std::string>(", ", nativeTypeString(*argIt));
  }
  str += ")";

  return str;
}
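
// For illustration only: a signature whose return is String and whose
// arguments are {This, Int64} renders via toString("Foo", "bar") as
// "string Foo->bar(int)", while a free function with no arguments renders
// as, e.g., "void name(void)".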

/////////////////////////////////////////////////////////////////////////////

bool registerConstant(const StringData* cnsName,
                      ConstantCallback callback) {
  TypedValueAux tv;
  tv.m_type = KindOfUninit;
  tv.m_data.pcnt = reinterpret_cast<MaybeCountable*>(callback);
  tv.dynamic() = true;
  if (!Unit::defNativeConstantCallback(cnsName, tv)) {
    return false;
  }
  s_constant_map[cnsName] = tv;
  return true;
}

//////////////////////////////////////////////////////////////////////////////
}} // namespace HPHP::Native