Fix refcounting in arReturn() and stop leaking static strings.
[hiphop-php.git] / hphp / runtime / vm / native.cpp
blob fc1b3ddf275274955cf62c51ba18fcefbdee9629
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "hphp/runtime/vm/native.h"
18 #include "hphp/runtime/vm/runtime.h"
20 namespace HPHP { namespace Native {
21 //////////////////////////////////////////////////////////////////////////////
// Global registries for native builtins and constants.  Presumably populated
// at extension-registration time — the registration code is not visible in
// this file.
23 BuiltinFunctionMap s_builtinFunctions;
// Global native constants, and native class constants keyed per class.
24 ConstantMap s_constant_map;
25 ClassConstantMapMap s_class_constant_map;
27 static size_t numGPRegArgs() {
28 #ifdef __AARCH64EL__
29 return 8; // r0-r7
30 #else // amd64
31 if (UNLIKELY(RuntimeOption::EvalSimulateARM)) {
32 return 8;
34 return 6; // rdi, rsi, rdx, rcx, r8, r9
35 #endif
38 // Note: This number should generally not be modified
39 // as it depends on the CPU's ABI.
40 // If an update is needed, however, update and run
41 // make_native-func-caller.php as well
// Number of SIMD registers used for double arguments: xmm0-xmm7 on amd64,
// v0-v7 on ARM64 (see populateArgs below).
42 const size_t kNumSIMDRegs = 8;
44 /////////////////////////////////////////////////////////////////////////////
45 #include "hphp/runtime/vm/native-func-caller.h"
47 inline bool isRefType(DataType dt) {
48 return (dt != KindOfNull) && (dt != KindOfBoolean) &&
49 (dt != KindOfInt64) && (dt != KindOfDouble);
52 inline void* retPtrArg(DataType retType, TypedValue &ret) {
53 if (retType == KindOfUnknown) {
54 return &ret;
56 if (isRefType(retType)) {
57 return &ret.m_data;
59 return nullptr;
62 /* Shuffle args into two vectors.
64 * SIMD_args contains at most 8 elements for the first 8 double args in the
65 * call which will end up in xmm0-xmm7 (or v0-v7)
67  * GP_args contains all remaining args, optionally with padding to ensure the
68  * GP regs only contain integer arguments (when there are fewer than
69  * numGPRegArgs INT args)
71 template<bool variadic>
// Marshal numArgs TypedValue args (stored in reverse below the ActRec, so
// arg i lives at args[-i]) into the GP_args/SIMD_args arrays consumed by the
// native-func-caller trampolines.  Doubles fill SIMD_args while SIMD regs
// remain (kNumSIMDRegs max); everything else goes to GP_args.  Overflow
// doubles are parked in tmp until the GP regs are full, so the first
// numGPRegArgs() entries of GP_args hold only integer-class values.
72 static void populateArgs(const Func* func,
73 TypedValue* args, const int numArgs,
74 int64_t* GP_args, int& GP_count,
75 double* SIMD_args, int& SIMD_count) {
76 auto numGP = numGPRegArgs();
// Holding pen for doubles that overflow the SIMD regs before the GP regs
// are full (see header comment above).
77 int64_t tmp[kMaxBuiltinArgs];
78 int ntmp = 0;
// NOTE(review): size_t i vs const int numArgs is a signed/unsigned
// comparison; harmless since numArgs >= 0, but worth normalizing.
80 for (size_t i = 0; i < numArgs; ++i) {
81 DataType type;
82 if (variadic) {
// The variadic capture param itself is always passed as an array.
83 const auto pi = func->params()[i];
84 type = pi.isVariadic() ? KindOfArray : pi.builtinType;
85 } else {
86 type = func->params()[i].builtinType;
88 if (type == KindOfDouble) {
89 if (SIMD_count < kNumSIMDRegs) {
90 SIMD_args[SIMD_count++] = args[-i].m_data.dbl;
91 } else if (GP_count < numGP) {
92 // We have enough double args to hit the stack
93 // but we haven't finished filling the GP regs yet.
94 // Stack these in tmp (autoboxed to int64_t)
95 // until we fill the GP regs, or we run out of args
96 // (in which case we'll pad them).
97 tmp[ntmp++] = args[-i].m_data.num;
98 } else {
99 // Additional SIMD args wind up on the stack
100 // and can autobox with integer types
101 GP_args[GP_count++] = args[-i].m_data.num;
103 } else {
104 assert((GP_count + 1) < kMaxBuiltinArgs);
105 if (type == KindOfUnknown) {
// Variant param: the native takes a TypedValue*.
106 GP_args[GP_count++] = (int64_t)(args - i);
107 } else if (isRefType(type)) {
// Ref-counted param: the native takes a pointer to the data slot.
108 GP_args[GP_count++] = (int64_t)&args[-i].m_data;
109 } else {
// Primitive: pass the raw 64-bit payload by value.
110 GP_args[GP_count++] = args[-i].m_data.num;
112 if ((GP_count == numGP) && ntmp) {
113 // GP regs are now full, bring tmp back to fill the initial stack
114 assert((GP_count + ntmp) <= kMaxBuiltinArgs);
115 memcpy(GP_args + GP_count, tmp, ntmp * sizeof(int64_t));
116 GP_count += ntmp;
117 ntmp = 0;
121 if (ntmp) {
122 assert((GP_count + ntmp) <= kMaxBuiltinArgs);
123 // We had more than kNumSIMDRegs doubles,
124 // but less than numGPRegArgs INTs.
125 // Push out the count and leave garbage behind.
126 if (GP_count < numGP) {
127 GP_count = numGP;
129 memcpy(GP_args + GP_count, tmp, ntmp * sizeof(int64_t));
130 GP_count += ntmp;
134 /* A much simpler version of the above specialized for GP-arg-only methods */
135 template<bool variadic>
136 static void populateArgsNoDoubles(const Func* func,
137 TypedValue* args, int numArgs,
138 int64_t* GP_args, int& GP_count) {
139 if (variadic) --numArgs;
140 assert(numArgs >= 0);
141 for (int i = 0; i < numArgs; ++i) {
142 auto dt = func->params()[i].builtinType;
143 assert(dt != KindOfDouble);
144 if (dt == KindOfUnknown) {
145 GP_args[GP_count++] = (int64_t)(args - i);
146 } else if (isRefType(dt)) {
147 GP_args[GP_count++] = (int64_t)&(args[-i].m_data);
148 } else {
149 GP_args[GP_count++] = args[-i].m_data.num;
152 if (variadic) {
153 GP_args[GP_count++] = (int64_t)&(args[-numArgs].m_data);
157 template<bool usesDoubles, bool variadic>
// Invoke a native builtin: shuffle the PHP args into the GP/SIMD layout,
// call through the native-func-caller trampoline matching the declared
// return type, and leave the result in ret.
// ctx is nullptr for functions, or the ObjectData*/Class* for methods.
158 void callFunc(const Func* func, void *ctx,
159 TypedValue *args, TypedValue& ret) {
160 assert(variadic == func->hasVariadicCaptureParam());
161 int64_t GP_args[kMaxBuiltinArgs];
162 double SIMD_args[kNumSIMDRegs];
163 int GP_count = 0, SIMD_count = 0;
164 const auto numArgs = func->numParams();
165 ret.m_type = func->returnType();
// Variant and ref-counted returns are written through a hidden pointer
// argument, which must precede the context and the real args.
166 if (auto retArg = retPtrArg(ret.m_type, ret)) {
167 GP_args[GP_count++] = (int64_t)retArg;
169 if (ctx) {
170 GP_args[GP_count++] = (int64_t)ctx;
172 if (usesDoubles) {
173 populateArgs<variadic>(func, args, numArgs,
174 GP_args, GP_count, SIMD_args, SIMD_count);
175 } else {
176 populateArgsNoDoubles<variadic>(func, args, numArgs, GP_args, GP_count);
179 BuiltinFunction f = func->nativeFuncPtr();
180 switch (ret.m_type) {
181 case KindOfUnknown:
// Variant return: the native filled ret via the hidden pointer.
// An untouched (Uninit) result is normalized to null.
182 callFuncInt64Impl(f, GP_args, GP_count, SIMD_args, SIMD_count);
183 if (ret.m_type == KindOfUninit) {
184 ret.m_type = KindOfNull;
186 return;
187 case KindOfNull:
188 case KindOfBoolean:
// Normalize a native bool return to exactly 0/1.
189 ret.m_data.num =
190 callFuncInt64Impl(f, GP_args, GP_count, SIMD_args, SIMD_count) & 1;
191 return;
192 case KindOfInt64:
193 ret.m_data.num =
194 callFuncInt64Impl(f, GP_args, GP_count, SIMD_args, SIMD_count);
195 return;
196 case KindOfDouble:
197 ret.m_data.dbl =
198 callFuncDoubleImpl(f, GP_args, GP_count, SIMD_args, SIMD_count);
199 return;
200 default:
201 assert(isRefType(ret.m_type));
// Ref-counted return was written through the hidden pointer; a null
// payload means the native produced no value.
202 callFuncInt64Impl(f, GP_args, GP_count, SIMD_args, SIMD_count);
203 if (ret.m_data.num == 0) {
204 ret.m_type = KindOfNull;
206 return;
208 not_reached();
211 //////////////////////////////////////////////////////////////////////////////
// Coerce each passed arg in place to its declared builtin type.  Returns
// false (aborting the call) if a coercion fails while the function is in
// param-coerce mode; otherwise args are force-cast and we return true.
213 bool coerceFCallArgs(TypedValue* args,
214 int32_t numArgs, int32_t numNonDefault,
215 const Func* func) {
216 assert(numArgs == func->numParams());
218 bool paramCoerceMode = func->isParamCoerceMode();
// Only touch args the caller actually passed; defaulted params will be
// initialized by their funclets with values of the right type.
220 for (int32_t i = 0; (i < numNonDefault) && (i < numArgs); i++) {
221 const Func::ParamInfo& pi = func->params()[i];
// In coerce mode a failed conversion raises a param-type warning and
// bails out; otherwise the arg is unconditionally cast in place.
223 #define COERCE_OR_CAST(kind, warn_kind) \
224 if (paramCoerceMode) { \
225 if (!tvCoerceParamTo##kind##InPlace(&args[-i])) { \
226 raise_param_type_warning( \
227 func->name()->data(), \
228 i+1, \
229 KindOf##warn_kind, \
230 args[-i].m_type \
231 ); \
232 return false; \
234 } else { \
235 tvCastTo##kind##InPlace(&args[-i]); \
238 #define CASE(kind) \
239 case KindOf##kind: \
240 COERCE_OR_CAST(kind, kind) \
241 break; /* end of case */
243 switch (pi.builtinType) {
244 CASE(Boolean)
245 CASE(Int64)
246 CASE(Double)
247 CASE(String)
248 CASE(Array)
249 CASE(Resource)
250 case KindOfObject: {
// Object params that have a default value (either in the ParamInfo or
// in the method info) also accept null; others require a real object.
251 auto mpi = func->methInfo() ? func->methInfo()->parameters[i] : nullptr;
252 if (pi.hasDefaultValue() || (mpi && mpi->valueLen > 0)) {
253 COERCE_OR_CAST(NullableObject, Object);
254 } else {
255 COERCE_OR_CAST(Object, Object);
257 break;
259 case KindOfUnknown:
// Variant params accept any value as-is.
260 break;
261 default:
262 not_reached();
265 #undef CASE
266 #undef COERCE_OR_CAST
269 return true;
272 static inline int32_t minNumArgs(ActRec *ar) {
273 auto func = ar->m_func;
274 auto numArgs = func->numParams();
275 int32_t num = numArgs;
276 const Func::ParamInfoVec& paramInfo = func->params();
277 while (num &&
278 (paramInfo[num-1].funcletOff != InvalidAbsoluteOffset)) {
279 --num;
281 return num;
// Name to report in user-facing warnings/errors for this call: the
// magic-invoke name if present, otherwise the function name, qualified as
// "Class::func" for methods.
284 static const StringData* getInvokeName(ActRec *ar) {
285 if (ar->hasInvName()) {
286 return ar->getInvName();
288 auto func = ar->m_func;
289 auto cls = func->cls();
290 if (!cls) {
291 return func->name();
// NOTE(review): makeStaticString interns a new "Class::func" string each
// time this path runs; interned strings presumably live for the process
// lifetime, so a hot error path leaks memory — consider caching per Func.
// TODO confirm against makeStaticString's semantics.
293 String clsname(const_cast<StringData*>(cls->name()));
294 String funcname(const_cast<StringData*>(func->name()));
295 return makeStaticString(clsname + "::" + funcname);
298 template<bool variadic>
299 bool nativeWrapperCheckArgs(ActRec* ar) {
300 auto func = ar->m_func;
301 auto numArgs = func->numNonVariadicParams();
302 auto numNonDefault = ar->numArgs();
304 if (numNonDefault < numArgs) {
305 const Func::ParamInfoVec& paramInfo = func->params();
306 for (auto i = numNonDefault; i < numArgs; ++i) {
307 if (InvalidAbsoluteOffset == paramInfo[i].funcletOff) {
308 // There's at least one non-default param which wasn't passed
309 throw_wrong_arguments_nr(getInvokeName(ar)->data(),
310 numNonDefault, minNumArgs(ar), numArgs, 1);
311 return false;
314 } else if (!variadic && (numNonDefault > numArgs)) {
315 // Too many arguments passed, raise a warning ourselves this time
316 throw_wrong_arguments_nr(getInvokeName(ar)->data(),
317 numNonDefault, minNumArgs(ar), numArgs, 1);
318 return false;
320 // Looks good
321 return true;
324 template<bool usesDoubles, bool variadic>
// ActRec entry point for native functions (no context argument): checks the
// arg count, coerces args to their declared builtin types, invokes the
// native implementation, then frees locals and returns via ar->m_r.
325 TypedValue* functionWrapper(ActRec* ar) {
326 assert(ar);
327 auto func = ar->m_func;
328 auto numArgs = func->numParams();
329 auto numNonDefault = ar->numArgs();
330 assert(variadic == func->hasVariadicCaptureParam());
// Args are stored in reverse order immediately below the ActRec.
331 TypedValue* args = ((TypedValue*)ar) - 1;
332 TypedValue rv;
333 rv.m_type = KindOfNull;
// The arg-count check is skipped when exactly numArgs were passed.
335 if (((numNonDefault == numArgs) ||
336 (nativeWrapperCheckArgs<variadic>(ar))) &&
337 (coerceFCallArgs(args, numArgs, numNonDefault, func))) {
338 callFunc<usesDoubles, variadic>(func, nullptr, args, rv);
// Check/coercion failed: param-coerce-mode-false builtins yield false;
// all others fall through returning the null initialized above.
339 } else if (func->attrs() & AttrParamCoerceModeFalse) {
340 rv.m_type = KindOfBoolean;
341 rv.m_data.num = 0;
344 assert(rv.m_type != KindOfUninit);
345 frame_free_locals_no_this_inl(ar, func->numLocals(), &rv);
346 tvCopy(rv, ar->m_r);
347 return &ar->m_r;
350 template<bool usesDoubles, bool variadic>
// ActRec entry point for native methods.  Like functionWrapper, but also
// resolves and prepends the context argument (ObjectData* for instance
// calls, Class* for static calls) before invoking the native.
351 TypedValue* methodWrapper(ActRec* ar) {
352 assert(ar);
353 auto func = ar->m_func;
354 auto numArgs = func->numParams();
355 auto numNonDefault = ar->numArgs();
356 bool isStatic = func->isStatic();
357 assert(variadic == func->hasVariadicCaptureParam());
// Args are stored in reverse order immediately below the ActRec.
358 TypedValue* args = ((TypedValue*)ar) - 1;
359 TypedValue rv;
360 rv.m_type = KindOfNull;
362 if (((numNonDefault == numArgs) ||
363 (nativeWrapperCheckArgs<variadic>(ar))) &&
364 (coerceFCallArgs(args, numArgs, numNonDefault, func))) {
365 // Prepend a context arg for methods
366 // KindOfClass when it's being called statically Foo::bar()
367 // KindOfObject when it's being called on an instance $foo->bar()
368 void* ctx; // ObjectData* or Class*
369 if (ar->hasThis()) {
370 if (isStatic) {
// Static method invoked through an instance: fatal.
371 throw_instance_method_fatal(getInvokeName(ar)->data());
373 ctx = ar->getThis();
374 } else {
375 if (!isStatic) {
// Instance method invoked statically: fatal.
376 throw_instance_method_fatal(getInvokeName(ar)->data());
378 ctx = ar->getClass();
381 callFunc<usesDoubles, variadic>(func, ctx, args, rv);
// Check/coercion failed: param-coerce-mode-false builtins yield false.
382 } else if (func->attrs() & AttrParamCoerceModeFalse) {
383 rv.m_type = KindOfBoolean;
384 rv.m_data.num = 0;
387 assert(rv.m_type != KindOfUninit);
// Instance frames use the variant that also handles $this (presumably
// releasing it); static frames have no $this to release.
388 if (isStatic) {
389 frame_free_locals_no_this_inl(ar, func->numLocals(), &rv);
390 } else {
391 frame_free_locals_inl(ar, func->numLocals(), &rv);
393 tvCopy(rv, ar->m_r);
394 return &ar->m_r;
397 BuiltinFunction getWrapper(bool method, bool usesDoubles, bool variadic) {
398 if (method) {
399 if ( usesDoubles && variadic) return methodWrapper<true,true>;
400 if ( usesDoubles && !variadic) return methodWrapper<true,false>;
401 if (!usesDoubles && variadic) return methodWrapper<false,true>;
402 if (!usesDoubles && !variadic) return methodWrapper<false,false>;
403 } else {
404 if ( usesDoubles && variadic) return functionWrapper<true,true>;
405 if ( usesDoubles && !variadic) return functionWrapper<true,false>;
406 if (!usesDoubles && variadic) return functionWrapper<false,true>;
407 if (!usesDoubles && !variadic) return functionWrapper<false,false>;
409 not_reached();
410 return nullptr;
// Stub installed for declared-but-unimplemented natives: raises a fatal
// naming the function or method, with frame cleanup and a null return.
413 TypedValue* unimplementedWrapper(ActRec* ar) {
414 auto func = ar->m_func;
415 auto cls = func->cls();
416 ar->m_r.m_type = KindOfNull;
417 if (cls) {
418 raise_error("Call to unimplemented native method %s::%s()",
419 cls->name()->data(), func->name()->data());
// NOTE(review): the cleanup below only executes if raise_error returns;
// if raise_error always throws a fatal, this is effectively dead code —
// confirm raise_error's semantics before relying on it.
420 if (func->isStatic()) {
421 frame_free_locals_no_this_inl(ar, func->numParams(), &ar->m_r);
422 } else {
423 frame_free_locals_inl(ar, func->numParams(), &ar->m_r);
425 } else {
426 raise_error("Call to unimplemented native function %s()",
427 func->name()->data());
428 frame_free_locals_no_this_inl(ar, func->numParams(), &ar->m_r);
430 return &ar->m_r;
433 //////////////////////////////////////////////////////////////////////////////
434 }} // namespace HPHP::Native