Remove CppCall's indirect call kinds, move to its own header
[hiphop-php.git] / hphp / runtime / vm / jit / code-gen-arm.cpp
blob259f18468ca8fe67930457e8f3faad6f22dca0ff
1 /*
2 +----------------------------------------------------------------------+
3 | HipHop for PHP |
4 +----------------------------------------------------------------------+
5 | Copyright (c) 2010-2014 Facebook, Inc. (http://www.facebook.com) |
6 +----------------------------------------------------------------------+
7 | This source file is subject to version 3.01 of the PHP license, |
8 | that is bundled with this package in the file LICENSE, and is |
9 | available through the world-wide-web at the following url: |
10 | http://www.php.net/license/3_01.txt |
11 | If you did not receive a copy of the PHP license and are unable to |
12 | obtain it through the world-wide-web, please send a note to |
13 | license@php.net so we can mail you a copy immediately. |
14 +----------------------------------------------------------------------+
17 #include "hphp/runtime/vm/jit/code-gen-arm.h"
18 #include <vector>
20 #include "folly/Optional.h"
22 #include "hphp/runtime/ext/ext_collections.h"
23 #include "hphp/runtime/ext/ext_generator.h"
25 #include "hphp/runtime/vm/jit/abi-arm.h"
26 #include "hphp/runtime/vm/jit/arg-group.h"
27 #include "hphp/runtime/vm/jit/code-gen-helpers-arm.h"
28 #include "hphp/runtime/vm/jit/back-end-arm.h"
29 #include "hphp/runtime/vm/jit/native-calls.h"
30 #include "hphp/runtime/vm/jit/punt.h"
31 #include "hphp/runtime/vm/jit/reg-algorithms.h"
32 #include "hphp/runtime/vm/jit/service-requests-arm.h"
33 #include "hphp/runtime/vm/jit/service-requests-inline.h"
34 #include "hphp/runtime/vm/jit/translator-inline.h"
36 namespace HPHP { namespace jit { namespace arm {
38 TRACE_SET_MOD(hhir);
40 //////////////////////////////////////////////////////////////////////
// NOOP_OPCODE(name) defines CodeGenerator::cg<name> as an empty method:
// these IR opcodes require no machine code from this (ARM) backend.
42 #define NOOP_OPCODE(name) void CodeGenerator::cg##name(IRInstruction*) {}
44 NOOP_OPCODE(DefConst)
45 NOOP_OPCODE(DefFP)
46 NOOP_OPCODE(DefSP)
47 NOOP_OPCODE(TrackLoc)
48 NOOP_OPCODE(AssertLoc)
49 NOOP_OPCODE(AssertStk)
50 NOOP_OPCODE(Nop)
51 NOOP_OPCODE(DefLabel)
52 NOOP_OPCODE(ExceptionBarrier)
53 NOOP_OPCODE(TakeStack)
54 NOOP_OPCODE(TakeRef)
55 NOOP_OPCODE(EndGuards)
57 // XXX
58 NOOP_OPCODE(DbgAssertPtr);
60 // When implemented this shouldn't be a nop, but there's no reason to make us
61 // punt on everything until then.
62 NOOP_OPCODE(DbgAssertRetAddr)
64 #undef NOOP_OPCODE
66 //////////////////////////////////////////////////////////////////////
// CALL_OPCODE(name) defines cg<name> as a thin wrapper that emits a call
// to a native helper via cgCallNative (argument/return/sync info comes
// from the NativeCalls::CallMap entry for the opcode — see cgCallNative).
// NOTE(review): the macro's closing "}" (orig. line 71) is one of several
// lines absent from this extract; the code text is otherwise verbatim.
68 #define CALL_OPCODE(name) \
69 void CodeGenerator::cg##name(IRInstruction* i) { \
70 cgCallNative(vmain(), i); \
73 CALL_OPCODE(Box)
74 CALL_OPCODE(ConvIntToStr)
76 CALL_OPCODE(AllocObj)
78 CALL_OPCODE(ConcatStrStr)
79 CALL_OPCODE(ConcatIntStr)
80 CALL_OPCODE(ConcatStrInt)
81 CALL_OPCODE(ConcatStr3);
82 CALL_OPCODE(ConcatStr4);
84 CALL_OPCODE(PrintStr)
85 CALL_OPCODE(PrintInt)
86 CALL_OPCODE(PrintBool)
88 CALL_OPCODE(AddElemStrKey)
90 CALL_OPCODE(ConvBoolToArr)
91 CALL_OPCODE(ConvDblToArr)
92 CALL_OPCODE(ConvIntToArr)
93 CALL_OPCODE(ConvObjToArr)
94 CALL_OPCODE(ConvStrToArr)
95 CALL_OPCODE(ConvCellToArr)
97 CALL_OPCODE(ConvStrToBool)
98 CALL_OPCODE(ConvCellToBool)
99 CALL_OPCODE(ConvArrToDbl)
100 CALL_OPCODE(ConvObjToDbl)
101 CALL_OPCODE(ConvStrToDbl)
102 CALL_OPCODE(ConvCellToDbl)
104 CALL_OPCODE(ConvObjToInt)
105 CALL_OPCODE(ConvArrToInt)
106 CALL_OPCODE(ConvStrToInt)
108 CALL_OPCODE(RaiseWarning)
109 CALL_OPCODE(RaiseError)
110 CALL_OPCODE(ConvCellToObj)
111 CALL_OPCODE(LookupClsMethod)
112 CALL_OPCODE(RaiseNotice)
113 CALL_OPCODE(LookupClsRDSHandle)
114 CALL_OPCODE(LdSwitchStrIndex)
115 CALL_OPCODE(LdSwitchDblIndex)
116 CALL_OPCODE(LdSwitchObjIndex)
117 CALL_OPCODE(CustomInstanceInit)
118 CALL_OPCODE(LdClsCtor)
120 CALL_OPCODE(LdArrFuncCtx)
121 CALL_OPCODE(LdArrFPushCuf)
122 CALL_OPCODE(LdStrFPushCuf)
123 CALL_OPCODE(NewArray)
124 CALL_OPCODE(NewMixedArray)
125 CALL_OPCODE(NewVArray)
126 CALL_OPCODE(NewMIArray)
127 CALL_OPCODE(NewMSArray)
128 CALL_OPCODE(NewLikeArray)
129 CALL_OPCODE(NewPackedArray)
130 CALL_OPCODE(NewCol)
131 CALL_OPCODE(Clone)
132 CALL_OPCODE(ClosureStaticLocInit)
133 CALL_OPCODE(VerifyParamCallable)
134 CALL_OPCODE(VerifyParamFail)
135 CALL_OPCODE(WarnNonObjProp)
136 CALL_OPCODE(ThrowNonObjProp)
137 CALL_OPCODE(RaiseUndefProp)
138 CALL_OPCODE(AddNewElem)
139 CALL_OPCODE(ColAddElemC)
140 CALL_OPCODE(ColAddNewElemC)
141 CALL_OPCODE(ArrayAdd)
142 CALL_OPCODE(CreateCont)
143 CALL_OPCODE(CreateAFWH)
144 CALL_OPCODE(CreateSSWH)
145 CALL_OPCODE(AFWHPrepareChild)
146 CALL_OPCODE(ABCUnblock)
147 CALL_OPCODE(TypeProfileFunc)
148 CALL_OPCODE(IncStatGrouped)
149 CALL_OPCODE(ZeroErrorLevel)
150 CALL_OPCODE(RestoreErrorLevel)
151 CALL_OPCODE(Count)
152 CALL_OPCODE(CountArray)
154 /////////////////////////////////////////////////////////////////////
// Abort code generation for the current instruction by throwing
// FailedCodeGen with enough context (file/line/opcode name, bytecode
// offset, function, resume mode, profiling TransID) for the caller to
// log the punt and fall back to another translation strategy.
// NOTE(review): blank lines and closing braces (e.g. orig. 159-160) are
// absent from this extract; code text is otherwise preserved verbatim.
155 void cgPunt(const char* file, int line, const char* func, uint32_t bcOff,
156 const Func* vmFunc, bool resumed, TransID profTransId) {
157 FTRACE(1, "punting: {}\n", func);
158 throw FailedCodeGen(file, line, func, bcOff, vmFunc, resumed, profTransId);
// PUNT_OPCODE(name) defines cg<name> to immediately punt (via cgPunt)
// using the current instruction's marker for bytecode offset and
// profiling TransID. CG_PUNT does the same inline from within a handler
// body, naming the calling context instead of an opcode.
161 #define PUNT_OPCODE(name) \
162 void CodeGenerator::cg##name(IRInstruction* inst) { \
163 cgPunt(__FILE__, __LINE__, #name, m_curInst->marker().bcOff(), \
164 curFunc(), resumed(), m_curInst->marker().profTransId()); \
167 #define CG_PUNT(instr) \
168 cgPunt(__FILE__, __LINE__, #instr, m_curInst->marker().bcOff(), \
169 curFunc(), resumed(), m_curInst->marker().profTransId())
171 /////////////////////////////////////////////////////////////////////
// Every IR opcode listed below is not yet implemented for ARM: each
// expands to a cg method that punts back to the translator (see
// PUNT_OPCODE above).
172 //TODO t3702757: Convert to CALL_OPCODE, the following set works on
173 // x86 but needs a closer look on arm
174 PUNT_OPCODE(AddElemIntKey)
175 PUNT_OPCODE(ConvCellToInt)
176 PUNT_OPCODE(ArrayIdx)
177 PUNT_OPCODE(RaiseArrayIndexNotice)
178 PUNT_OPCODE(RaiseUninitLoc)
179 PUNT_OPCODE(VerifyRetCallable)
180 PUNT_OPCODE(VerifyRetFail)
181 PUNT_OPCODE(GenericIdx)
182 // End of failing set
183 /////////////////////////////////////////////////////////////////////
185 PUNT_OPCODE(ProfileStr)
186 PUNT_OPCODE(ConvArrToBool)
187 PUNT_OPCODE(ConvDblToBool)
188 PUNT_OPCODE(ConvIntToBool)
189 PUNT_OPCODE(ConvObjToBool)
190 PUNT_OPCODE(ConvBoolToDbl)
191 PUNT_OPCODE(ConvIntToDbl)
193 PUNT_OPCODE(ConvBoolToInt)
194 PUNT_OPCODE(ConvDblToInt)
196 PUNT_OPCODE(ConvBoolToStr)
197 PUNT_OPCODE(ConvDblToStr)
198 PUNT_OPCODE(ConvObjToStr)
199 PUNT_OPCODE(ConvResToStr)
200 PUNT_OPCODE(ConvCellToStr)
202 PUNT_OPCODE(ProfileArray)
203 PUNT_OPCODE(CheckTypeMem)
204 PUNT_OPCODE(CheckLoc)
205 PUNT_OPCODE(CastStk)
206 PUNT_OPCODE(CastStkIntToDbl)
207 PUNT_OPCODE(CoerceStk)
208 PUNT_OPCODE(CheckDefinedClsEq)
209 PUNT_OPCODE(TryEndCatch)
210 PUNT_OPCODE(LdUnwinderValue)
211 PUNT_OPCODE(DeleteUnwinderException)
212 PUNT_OPCODE(AddDbl)
213 PUNT_OPCODE(SubDbl)
214 PUNT_OPCODE(MulDbl)
215 PUNT_OPCODE(DivDbl)
216 PUNT_OPCODE(Mod)
217 PUNT_OPCODE(Sqrt)
218 PUNT_OPCODE(AbsDbl)
219 PUNT_OPCODE(XorBool)
220 PUNT_OPCODE(ExtendsClass)
221 PUNT_OPCODE(IsWaitHandle)
222 PUNT_OPCODE(InstanceOf)
223 PUNT_OPCODE(InstanceOfIface)
224 PUNT_OPCODE(InterfaceSupportsArr)
225 PUNT_OPCODE(InterfaceSupportsStr)
226 PUNT_OPCODE(InterfaceSupportsInt)
227 PUNT_OPCODE(InterfaceSupportsDbl)
228 PUNT_OPCODE(IsTypeMem)
229 PUNT_OPCODE(IsNTypeMem)
230 PUNT_OPCODE(Gt)
231 PUNT_OPCODE(Gte)
232 PUNT_OPCODE(Lt)
233 PUNT_OPCODE(Lte)
234 PUNT_OPCODE(Eq)
235 PUNT_OPCODE(Neq)
236 PUNT_OPCODE(GtDbl)
237 PUNT_OPCODE(GteDbl)
238 PUNT_OPCODE(LtDbl)
239 PUNT_OPCODE(LteDbl)
240 PUNT_OPCODE(EqDbl)
241 PUNT_OPCODE(NeqDbl)
242 PUNT_OPCODE(LtX)
243 PUNT_OPCODE(GtX)
244 PUNT_OPCODE(GteX)
245 PUNT_OPCODE(LteX)
246 PUNT_OPCODE(EqX)
247 PUNT_OPCODE(NeqX)
248 PUNT_OPCODE(Same)
249 PUNT_OPCODE(NSame)
250 PUNT_OPCODE(Floor)
251 PUNT_OPCODE(Ceil)
252 PUNT_OPCODE(InstanceOfBitmask)
253 PUNT_OPCODE(NInstanceOfBitmask)
254 PUNT_OPCODE(IsType)
255 PUNT_OPCODE(IsScalarType)
256 PUNT_OPCODE(IsNType)
257 PUNT_OPCODE(JmpGt)
258 PUNT_OPCODE(JmpGte)
259 PUNT_OPCODE(JmpLt)
260 PUNT_OPCODE(JmpLte)
261 PUNT_OPCODE(JmpEq)
262 PUNT_OPCODE(JmpNeq)
263 PUNT_OPCODE(JmpGtInt)
264 PUNT_OPCODE(JmpGteInt)
265 PUNT_OPCODE(JmpLtInt)
266 PUNT_OPCODE(JmpLteInt)
267 PUNT_OPCODE(JmpEqInt)
268 PUNT_OPCODE(JmpNeqInt)
269 PUNT_OPCODE(JmpSame)
270 PUNT_OPCODE(JmpNSame)
271 PUNT_OPCODE(JmpInstanceOfBitmask)
272 PUNT_OPCODE(JmpNInstanceOfBitmask)
273 PUNT_OPCODE(JmpZero)
274 PUNT_OPCODE(JmpNZero)
275 PUNT_OPCODE(ReqBindJmpGt)
276 PUNT_OPCODE(ReqBindJmpGte)
277 PUNT_OPCODE(ReqBindJmpLt)
278 PUNT_OPCODE(ReqBindJmpLte)
279 PUNT_OPCODE(ReqBindJmpEq)
280 PUNT_OPCODE(ReqBindJmpNeq)
281 PUNT_OPCODE(ReqBindJmpGtInt)
282 PUNT_OPCODE(ReqBindJmpGteInt)
283 PUNT_OPCODE(ReqBindJmpLtInt)
284 PUNT_OPCODE(ReqBindJmpLteInt)
285 PUNT_OPCODE(ReqBindJmpEqInt)
286 PUNT_OPCODE(ReqBindJmpNeqInt)
287 PUNT_OPCODE(ReqBindJmpSame)
288 PUNT_OPCODE(ReqBindJmpNSame)
289 PUNT_OPCODE(ReqBindJmpInstanceOfBitmask)
290 PUNT_OPCODE(ReqBindJmpNInstanceOfBitmask)
291 PUNT_OPCODE(ReqBindJmpZero)
292 PUNT_OPCODE(ReqBindJmpNZero)
293 PUNT_OPCODE(SideExitJmpGt)
294 PUNT_OPCODE(SideExitJmpGte)
295 PUNT_OPCODE(SideExitJmpLt)
296 PUNT_OPCODE(SideExitJmpLte)
297 PUNT_OPCODE(SideExitJmpEq)
298 PUNT_OPCODE(SideExitJmpNeq)
299 PUNT_OPCODE(SideExitJmpGtInt)
300 PUNT_OPCODE(SideExitJmpGteInt)
301 PUNT_OPCODE(SideExitJmpLtInt)
302 PUNT_OPCODE(SideExitJmpLteInt)
303 PUNT_OPCODE(SideExitJmpEqInt)
304 PUNT_OPCODE(SideExitJmpNeqInt)
305 PUNT_OPCODE(SideExitJmpSame)
306 PUNT_OPCODE(SideExitJmpNSame)
307 PUNT_OPCODE(SideExitJmpInstanceOfBitmask)
308 PUNT_OPCODE(SideExitJmpNInstanceOfBitmask)
309 PUNT_OPCODE(SideExitJmpZero)
310 PUNT_OPCODE(SideExitJmpNZero)
311 PUNT_OPCODE(SideExitGuardLoc)
312 PUNT_OPCODE(JmpIndirect)
313 PUNT_OPCODE(CheckSurpriseFlags)
314 PUNT_OPCODE(SurpriseHook)
315 PUNT_OPCODE(FunctionSuspendHook)
316 PUNT_OPCODE(FunctionReturnHook)
317 PUNT_OPCODE(ReleaseVVOrExit)
318 PUNT_OPCODE(CheckInit)
319 PUNT_OPCODE(CheckInitMem)
320 PUNT_OPCODE(CheckCold)
321 PUNT_OPCODE(CheckNullptr)
322 PUNT_OPCODE(CheckBounds)
323 PUNT_OPCODE(LdVectorSize)
324 PUNT_OPCODE(CheckPackedArrayBounds)
325 PUNT_OPCODE(CheckPackedArrayElemNull)
326 PUNT_OPCODE(VectorHasImmCopy)
327 PUNT_OPCODE(VectorDoCow)
328 PUNT_OPCODE(CheckNonNull)
329 PUNT_OPCODE(AssertNonNull)
330 PUNT_OPCODE(UnboxPtr)
331 PUNT_OPCODE(BoxPtr)
332 PUNT_OPCODE(LdVectorBase)
333 PUNT_OPCODE(LdPairBase)
334 PUNT_OPCODE(LdLocAddr)
335 PUNT_OPCODE(LdMem)
336 PUNT_OPCODE(LdProp)
337 PUNT_OPCODE(LdElem)
338 PUNT_OPCODE(LdPackedArrayElem)
339 PUNT_OPCODE(LdRef)
340 PUNT_OPCODE(LdGbl)
341 PUNT_OPCODE(LdThis)
342 PUNT_OPCODE(LdRetAddr)
343 PUNT_OPCODE(ConvClsToCctx)
344 PUNT_OPCODE(LdCtx)
345 PUNT_OPCODE(LdCctx)
346 PUNT_OPCODE(LdCls)
347 PUNT_OPCODE(LdClsCached)
348 PUNT_OPCODE(LdClsCachedSafe)
349 PUNT_OPCODE(LdClsCtx)
350 PUNT_OPCODE(LdClsCctx)
351 PUNT_OPCODE(LdClsCns)
352 PUNT_OPCODE(LookupClsCns)
353 PUNT_OPCODE(LdCns)
354 PUNT_OPCODE(LdClsInitData)
355 PUNT_OPCODE(LookupCns)
356 PUNT_OPCODE(LookupCnsE)
357 PUNT_OPCODE(LookupCnsU)
358 PUNT_OPCODE(DerefClsRDSHandle)
359 PUNT_OPCODE(LookupClsMethodCache)
360 PUNT_OPCODE(LdClsMethodCacheFunc)
361 PUNT_OPCODE(LdClsMethodCacheCls)
362 PUNT_OPCODE(LdClsMethodFCacheFunc)
363 PUNT_OPCODE(LookupClsMethodFCache)
364 PUNT_OPCODE(GetCtxFwdCallDyn);
365 PUNT_OPCODE(GetCtxFwdCall)
366 PUNT_OPCODE(LdClsMethod)
367 PUNT_OPCODE(LdPropAddr)
368 PUNT_OPCODE(LdClsPropAddrKnown)
369 PUNT_OPCODE(LdClsPropAddrOrNull)
370 PUNT_OPCODE(LdClsPropAddrOrRaise)
371 PUNT_OPCODE(LdObjMethod)
372 PUNT_OPCODE(LdObjInvoke)
373 PUNT_OPCODE(LdGblAddrDef)
374 PUNT_OPCODE(LdGblAddr)
375 PUNT_OPCODE(LdObjClass)
376 PUNT_OPCODE(LdFunc)
377 PUNT_OPCODE(LdFuncCachedU)
378 PUNT_OPCODE(LdFuncCachedSafe)
379 PUNT_OPCODE(LdBindAddr)
380 PUNT_OPCODE(LdSSwitchDestFast)
381 PUNT_OPCODE(LdSSwitchDestSlow)
382 PUNT_OPCODE(JmpSwitchDest)
383 PUNT_OPCODE(ConstructInstance)
384 PUNT_OPCODE(CheckInitProps)
385 PUNT_OPCODE(InitProps)
386 PUNT_OPCODE(CheckInitSProps)
387 PUNT_OPCODE(InitSProps)
388 PUNT_OPCODE(RegisterLiveObj)
389 PUNT_OPCODE(NewInstanceRaw)
390 PUNT_OPCODE(InitObjProps)
391 PUNT_OPCODE(StClosureFunc)
392 PUNT_OPCODE(StClosureArg)
393 PUNT_OPCODE(StClosureCtx)
394 PUNT_OPCODE(NewStructArray)
395 PUNT_OPCODE(FreeActRec)
396 PUNT_OPCODE(CallArray)
397 PUNT_OPCODE(NativeImpl)
398 PUNT_OPCODE(RetCtrl)
399 PUNT_OPCODE(StRetVal)
400 PUNT_OPCODE(RetAdjustStack)
401 PUNT_OPCODE(StMem)
402 PUNT_OPCODE(StProp)
403 PUNT_OPCODE(StLocNT)
404 PUNT_OPCODE(StRef)
405 PUNT_OPCODE(StRaw)
406 PUNT_OPCODE(StElem)
407 PUNT_OPCODE(LdStaticLocCached)
408 PUNT_OPCODE(CheckStaticLocInit)
409 PUNT_OPCODE(StaticLocInitCached)
410 PUNT_OPCODE(CufIterSpillFrame)
411 PUNT_OPCODE(ReqRetranslateOpt)
412 PUNT_OPCODE(Mov)
413 PUNT_OPCODE(LdMIStateAddr)
414 PUNT_OPCODE(IncRefCtx)
415 PUNT_OPCODE(DecRefThis)
416 PUNT_OPCODE(GenericRetDecRefs)
417 PUNT_OPCODE(DecRef)
418 PUNT_OPCODE(DecRefNZ)
419 PUNT_OPCODE(DefInlineFP)
420 PUNT_OPCODE(InlineReturn)
421 PUNT_OPCODE(ReDefSP)
422 PUNT_OPCODE(OODeclExists);
423 PUNT_OPCODE(VerifyParamCls)
424 PUNT_OPCODE(VerifyRetCls)
425 PUNT_OPCODE(ConcatCellCell)
426 PUNT_OPCODE(AKExists)
427 PUNT_OPCODE(ContEnter)
428 PUNT_OPCODE(ContPreNext)
429 PUNT_OPCODE(ContStartedCheck)
430 PUNT_OPCODE(ContValid)
431 PUNT_OPCODE(ContArIncKey)
432 PUNT_OPCODE(ContArUpdateIdx)
433 PUNT_OPCODE(LdContActRec)
434 PUNT_OPCODE(StContArRaw)
435 PUNT_OPCODE(LdContArValue)
436 PUNT_OPCODE(StContArValue)
437 PUNT_OPCODE(LdContArKey)
438 PUNT_OPCODE(StContArKey)
439 PUNT_OPCODE(StAsyncArRaw)
440 PUNT_OPCODE(StAsyncArResult)
441 PUNT_OPCODE(LdAsyncArParentChain)
442 PUNT_OPCODE(AFWHBlockOn)
443 PUNT_OPCODE(LdWHState)
444 PUNT_OPCODE(LdWHResult)
445 PUNT_OPCODE(LdAFWHActRec)
446 PUNT_OPCODE(LdResumableArObj)
447 PUNT_OPCODE(IterInit)
448 PUNT_OPCODE(IterInitK)
449 PUNT_OPCODE(IterNext)
450 PUNT_OPCODE(IterNextK)
451 PUNT_OPCODE(WIterInit)
452 PUNT_OPCODE(WIterInitK)
453 PUNT_OPCODE(WIterNext)
454 PUNT_OPCODE(WIterNextK)
455 PUNT_OPCODE(MIterInit)
456 PUNT_OPCODE(MIterInitK)
457 PUNT_OPCODE(MIterNext)
458 PUNT_OPCODE(MIterNextK)
459 PUNT_OPCODE(IterFree)
460 PUNT_OPCODE(MIterFree)
461 PUNT_OPCODE(DecodeCufIter)
462 PUNT_OPCODE(CIterFree)
463 PUNT_OPCODE(DefMIStateBase)
464 PUNT_OPCODE(BaseG)
465 PUNT_OPCODE(PropX)
466 PUNT_OPCODE(PropDX)
467 PUNT_OPCODE(PropDXStk)
468 PUNT_OPCODE(CGetProp)
469 PUNT_OPCODE(VGetProp)
470 PUNT_OPCODE(VGetPropStk)
471 PUNT_OPCODE(BindProp)
472 PUNT_OPCODE(BindPropStk)
473 PUNT_OPCODE(SetProp)
474 PUNT_OPCODE(SetPropStk)
475 PUNT_OPCODE(UnsetProp)
476 PUNT_OPCODE(SetOpProp)
477 PUNT_OPCODE(SetOpPropStk)
478 PUNT_OPCODE(IncDecProp)
479 PUNT_OPCODE(IncDecPropStk)
480 PUNT_OPCODE(EmptyProp)
481 PUNT_OPCODE(IssetProp)
482 PUNT_OPCODE(ElemX)
483 PUNT_OPCODE(ElemArray)
484 PUNT_OPCODE(ElemDX)
485 PUNT_OPCODE(ElemDXStk)
486 PUNT_OPCODE(ElemUX)
487 PUNT_OPCODE(ElemUXStk)
488 PUNT_OPCODE(ArrayGet)
489 PUNT_OPCODE(StringGet)
490 PUNT_OPCODE(MapGet)
491 PUNT_OPCODE(CGetElem)
492 PUNT_OPCODE(VGetElem)
493 PUNT_OPCODE(VGetElemStk)
494 PUNT_OPCODE(BindElem)
495 PUNT_OPCODE(BindElemStk)
496 PUNT_OPCODE(ArraySet)
497 PUNT_OPCODE(MapSet)
498 PUNT_OPCODE(ArraySetRef)
499 PUNT_OPCODE(SetElem)
500 PUNT_OPCODE(SetElemStk)
501 PUNT_OPCODE(SetWithRefElem)
502 PUNT_OPCODE(SetWithRefElemStk)
503 PUNT_OPCODE(UnsetElem)
504 PUNT_OPCODE(UnsetElemStk)
505 PUNT_OPCODE(SetOpElem)
506 PUNT_OPCODE(SetOpElemStk)
507 PUNT_OPCODE(IncDecElem)
508 PUNT_OPCODE(IncDecElemStk)
509 PUNT_OPCODE(SetNewElem)
510 PUNT_OPCODE(SetNewElemStk)
511 PUNT_OPCODE(SetNewElemArray)
512 PUNT_OPCODE(SetNewElemArrayStk)
513 PUNT_OPCODE(SetWithRefNewElem)
514 PUNT_OPCODE(SetWithRefNewElemStk)
515 PUNT_OPCODE(BindNewElem)
516 PUNT_OPCODE(BindNewElemStk)
517 PUNT_OPCODE(ArrayIsset)
518 PUNT_OPCODE(StringIsset)
519 PUNT_OPCODE(VectorIsset)
520 PUNT_OPCODE(PairIsset)
521 PUNT_OPCODE(MapIsset)
522 PUNT_OPCODE(IssetElem)
523 PUNT_OPCODE(EmptyElem)
524 PUNT_OPCODE(IncStat)
525 PUNT_OPCODE(RBTrace)
526 PUNT_OPCODE(IncTransCounter)
527 PUNT_OPCODE(IncProfCounter)
528 PUNT_OPCODE(DbgAssertType)
529 PUNT_OPCODE(AddIntO)
530 PUNT_OPCODE(SubIntO)
531 PUNT_OPCODE(MulIntO)
532 PUNT_OPCODE(EagerSyncVMRegs)
533 PUNT_OPCODE(ColIsEmpty)
534 PUNT_OPCODE(ColIsNEmpty)
536 #undef PUNT_OPCODE
538 //////////////////////////////////////////////////////////////////////
540 // copy of ifThen in mc-generator-internal.h
// Emit "if (cc holds on flags sf) thenBlock": the then-code goes into a
// fresh block and control rejoins at `done`; `v` is left pointing at the
// join block so the caller continues emitting there.
541 template <class Then>
542 void ifThen(Vout& v, ConditionCode cc, Vreg sf, Then thenBlock) {
543 auto then = v.makeBlock();
544 auto done = v.makeBlock();
545 v << jcc{cc, sf, {done, then}};
546 v = then;
547 thenBlock(v);
548 if (!v.closed()) v << jmp{done};
549 v = done;
// Emit "if (bit `bit` of register r is zero) thenBlock", using a
// test-bit-and-branch (tbcc with vixl::eq); rejoins at `done` and leaves
// `v` at the join block.
552 template <class Then>
553 void ifZero(Vout& v, unsigned bit, Vreg r, Then thenBlock) {
554 auto then = v.makeBlock();
555 auto done = v.makeBlock();
556 v << tbcc{vixl::eq, bit, r, {done, then}};
557 v = then;
558 thenBlock(v);
559 if (!v.closed()) v << jmp{done};
560 v = done;
// Select-style helper: branch on r == 0 (cbcc); the true path runs t(v),
// the false path runs f(v), each yielding a Vreg. The two results are
// phi-joined (phijmp/phidef) into `dst`, which is returned. Leaves `v`
// at the join block.
563 template <class T, class F>
564 Vreg condZero(Vout& v, Vreg r, Vreg dst, T t, F f) {
565 using namespace x64;
566 auto fblock = v.makeBlock();
567 auto tblock = v.makeBlock();
568 auto done = v.makeBlock();
569 v << cbcc{vixl::eq, r, {fblock, tblock}};
570 v = tblock;
571 auto treg = t(v);
572 v << phijmp{done, v.makeTuple(VregList{treg})};
573 v = fblock;
574 auto freg = f(v);
575 v << phijmp{done, v.makeTuple(VregList{freg})};
576 v = done;
577 v << phidef{v.makeTuple(VregList{dst})};
578 return dst;
581 // copy of ifThenElse from code-gen-x64.cpp
// Emit "if (cc holds on flags sf) thenBlock else elseBlock"; both arms
// rejoin at `done` (unless an arm closed its block itself), and `v` is
// left at the join block.
582 template <class Then, class Else>
583 void ifThenElse(Vout& v, ConditionCode cc, Vreg sf, Then thenBlock,
584 Else elseBlock) {
585 auto thenLabel = v.makeBlock();
586 auto elseLabel = v.makeBlock();
587 auto done = v.makeBlock();
588 v << jcc{cc, sf, {elseLabel, thenLabel}};
589 v = thenLabel;
590 thenBlock(v);
591 if (!v.closed()) v << jmp{done};
592 v = elseLabel;
593 elseBlock(v);
594 if (!v.closed()) v << jmp{done};
595 v = done;
// Location (register assignment) of the current instruction's i-th source.
598 Vloc CodeGenerator::srcLoc(unsigned i) const {
599 return m_slocs[i];
// Location (register assignment) of the current instruction's i-th dest.
602 Vloc CodeGenerator::dstLoc(unsigned i) const {
603 return m_dlocs[i];
// Fresh ArgGroup for building a native-call argument list for m_curInst,
// using the instruction's source locations.
606 ArgGroup CodeGenerator::argGroup() const {
607 return ArgGroup(m_curInst, m_slocs);
// Vasm label previously assigned to the given IR block.
610 Vlabel CodeGenerator::label(Block* b) {
611 return m_state.labels[b];
614 //////////////////////////////////////////////////////////////////////
// Record a fixup at vasm point p: the marker's stack offset plus the
// bytecode offset relative to the function's base, emitted as an hcsync
// so VM state can be synchronized across the host call.
616 void CodeGenerator::recordHostCallSyncPoint(Vout& v, Vpoint p) {
617 auto stackOff = m_curInst->marker().spOff();
618 auto pcOff = m_curInst->marker().bcOff() - m_curInst->marker().func()->base();
619 v << hcsync{Fixup{pcOff, stackOff}, p};
622 //////////////////////////////////////////////////////////////////////
// Conjure should never survive to code generation; trap if it does.
624 void CodeGenerator::cgConjure(IRInstruction* inst) {
625 always_assert(false);
// Halt should never survive to code generation; trap if it does.
628 void CodeGenerator::cgHalt(IRInstruction* inst) {
629 always_assert(false);
632 //////////////////////////////////////////////////////////////////////
// Unconditional jump to the instruction's taken block.
634 void CodeGenerator::cgJmp(IRInstruction* inst) {
635 auto& v = vmain();
636 v << jmp{label(inst->taken())};
// Debug-build sanity check: load the 32-bit refcount of src(0); when the
// uncounted bit is clear, trap (brk) if the count exceeds
// RefCountMaxRealistic, i.e. looks corrupted.
639 void CodeGenerator::cgDbgAssertRefCount(IRInstruction* inst) {
640 // maybe reuse emitAssertRefCount
641 auto base = srcLoc(0).reg();
642 auto& v = vmain();
643 auto rCount = v.makeReg();
644 v << loadl{base[FAST_REFCOUNT_OFFSET], rCount};
645 ifZero(v, UncountedBitPos, rCount, [&](Vout& v) {
646 auto const sf = v.makeReg();
647 v << cmpli{RefCountMaxRealistic, rCount, sf};
// CC_A: unsigned "above" — count is implausibly large, so trap.
648 ifThen(v, CC_A, sf, [&](Vout& v) {
649 v << brk{0};
// Increment the refcount of src(0) in place.
//  - Statically uncounted types: emit nothing.
//  - Counted-but-maybe-static types: only increment when the loaded
//    count is >= 0 (UncountedValue/StaticValue are negative sentinels,
//    per the static_assert below).
//  - Type unknown at compile time: first compare the runtime type
//    register against KindOfRefCountThreshold and only then do the
//    maybe-static increment.
654 void CodeGenerator::cgIncRef(IRInstruction* inst) {
655 SSATmp* src = inst->src(0);
656 auto loc = srcLoc(0);
657 Type type = src->type();
658 if (type.notCounted()) return;
660 auto increfMaybeStatic = [&](Vout& v) {
661 auto base = loc.reg(0);
662 auto rCount = v.makeReg();
663 v << loadl{base[FAST_REFCOUNT_OFFSET], rCount};
664 if (!type.needsStaticBitCheck()) {
// Definitely non-static: unconditional count+1 store.
665 auto count1 = v.makeReg();
666 v << addli{1, rCount, count1, v.makeReg()};
667 v << storel{count1, base[FAST_REFCOUNT_OFFSET]};
668 } else {
669 auto const sf = v.makeReg();
670 v << cmpli{0, rCount, sf};
671 static_assert(UncountedValue < 0 && StaticValue < 0, "");
672 ifThen(v, CC_GE, sf, [&](Vout& v) {
673 auto count1 = v.makeReg();
674 v << addli{1, rCount, count1, v.makeReg()};
675 v << storel{count1, base[FAST_REFCOUNT_OFFSET]};
680 auto& v = vmain();
681 if (type.isKnownDataType()) {
682 assert(IS_REFCOUNTED_TYPE(type.toDataType()));
683 increfMaybeStatic(v);
684 } else {
// Runtime type check: loc.reg(1) holds the type; only refcounted
// kinds (above the threshold) get the increment.
685 auto const sf = v.makeReg();
686 v << cmpli{KindOfRefCountThreshold, loc.reg(1), sf};
687 ifThen(v, CC_G, sf, [&](Vout& v) { increfMaybeStatic(v); });
// AssertType is a pure move at the machine level: copy whichever of the
// (data, type) register pair the destination actually has allocated.
691 void CodeGenerator::cgAssertType(IRInstruction* inst) {
692 auto const src = srcLoc(0);
693 auto const dst = dstLoc(0);
694 auto& v = vmain();
695 if (dst.reg(0) != InvalidReg && dst.reg(1) != InvalidReg) {
// Both data and type registers live: parallel two-register copy.
696 v << copy2{src.reg(0), src.reg(1), dst.reg(0), dst.reg(1)};
697 } else if (dst.reg(0) != InvalidReg) {
698 v << copy{src.reg(0), dst.reg(0)};
699 } else if (dst.reg(1) != InvalidReg) {
700 v << copy{src.reg(1), dst.reg(1)};
704 //////////////////////////////////////////////////////////////////////
// Decref the object at `data`, whose DataType is known statically.
// Skips the decrement entirely when the uncounted/static bit is set
// (only emitted if the type may be static); otherwise decrements the
// count and, on reaching zero, calls the type's destructor helper
// (MCGenerator::getDtorCall) with a sync point.
706 void CodeGenerator::emitDecRefStaticType(Vout& v, Type type, Vreg data) {
707 assert(type.isKnownDataType());
708 auto done = v.makeBlock();
709 auto count = v.makeReg();
710 v << loadl{data[FAST_REFCOUNT_OFFSET], count};
711 if (type.needsStaticBitCheck()) {
// Static/uncounted objects (bit set) jump straight to done.
712 auto next = v.makeBlock();
713 v << tbcc{vixl::ne, UncountedBitPos, count, {next, done}};
714 v = next;
716 auto count1 = v.makeReg();
717 auto destruct = v.makeBlock();
718 auto const sf = v.makeReg();
719 v << subli{1, count, count1, sf};
720 v << storel{count1, data[FAST_REFCOUNT_OFFSET]};
// CC_Z: count hit zero — release the object.
721 v << jcc{CC_Z, sf, {done, destruct}};
722 v = destruct;
723 cgCallHelper(v,
724 MCGenerator::getDtorCall(type.toDataType()),
725 kVoidDest,
726 SyncOptions::kSyncPoint,
727 argGroup().reg(data));
728 v << jmp{done};
729 v = done;
// Decref a TypedValue at base[offset] whose type is only known at
// runtime: check the type field for refcounted-ness, check the static
// bit, decrement, and on zero call the generic tv_release_generic
// destructor with the value's address.
732 void CodeGenerator::emitDecRefDynamicType(Vout& v, Vreg base, int offset) {
733 auto counted_type = v.makeBlock();
734 auto counted_obj = v.makeBlock();
735 auto destruct = v.makeBlock();
736 auto done = v.makeBlock();
737 auto type = v.makeReg();
738 auto data = v.makeReg();
739 auto count = v.makeReg();
740 auto count1 = v.makeReg();
742 // Check the type
744 v << loadzbl{base[offset + TVOFF(m_type)], type};
745 auto const sf = v.makeReg();
746 v << cmpli{KindOfRefCountThreshold, type, sf};
747 v << jcc{CC_LE, sf, {counted_type, done}};
748 v = counted_type;
751 // Type is refcounted. Load the refcount.
752 v << load{base[offset + TVOFF(m_data)], data};
753 v << loadl{data[FAST_REFCOUNT_OFFSET], count};
755 // Is it static? Note that only the lower 32 bits of count are valid right
756 // now, but tbcc is only looking at a single one of them, so this is OK.
757 v << tbcc{vixl::ne, UncountedBitPos, count, {counted_obj, done}};
758 v = counted_obj;
761 // Not static. Decrement and write back.
762 auto const sf = v.makeReg();
763 v << subli{1, count, count1, sf};
764 v << storel{count1, data[FAST_REFCOUNT_OFFSET]};
766 // Did it go to zero?
767 v << jcc{CC_NZ, sf, {destruct, done}};
768 v = destruct;
771 // Went to zero. Have to destruct.
772 cgCallHelper(v,
773 CppCall::direct(tv_release_generic),
774 kVoidDest,
775 SyncOptions::kSyncPoint,
776 argGroup().addr(base, offset));
777 v << jmp{done};
778 v = done;
// Decref the TypedValue at base[offset], dispatching on how much we know
// about its type: fully dynamic -> emitDecRefDynamicType; known but
// possibly counted -> load the data pointer and use the static-type path.
// Types that are never counted emit nothing.
781 void CodeGenerator::emitDecRefMem(Vout& v, Type type, Vreg base, int offset) {
782 if (type.needsReg()) {
783 emitDecRefDynamicType(v, base, offset);
784 } else if (type.maybeCounted()) {
785 auto data = v.makeReg();
786 v << load{base[offset + TVOFF(m_data)], data};
787 emitDecRefStaticType(v, type, data);
// Decref a cell on the eval stack at the extra-data offset (in cells).
791 void CodeGenerator::cgDecRefStack(IRInstruction* inst) {
792 emitDecRefMem(vmain(), inst->typeParam(),
793 srcLoc(0).reg(),
794 cellsToBytes(inst->extra<DecRefStack>()->offset));
// Decref a local variable, addressed by frame pointer + local offset.
797 void CodeGenerator::cgDecRefLoc(IRInstruction* inst) {
798 emitDecRefMem(vmain(), inst->typeParam(),
799 srcLoc(0).reg(),
800 localOffset(inst->extra<DecRefLoc>()->locId));
// Decref a TypedValue at src(0) plus an immediate byte offset (src(1)).
803 void CodeGenerator::cgDecRefMem(IRInstruction* inst) {
804 emitDecRefMem(vmain(), inst->typeParam(),
805 srcLoc(0).reg(),
806 inst->src(1)->intVal());
809 //////////////////////////////////////////////////////////////////////
810 // Arithmetic Instructions
// Integer add: dst = srcL + srcR. Vasm operand order here lists the
// right operand first (att-style, cf. the cmpq comment below); the
// trailing v.makeReg() receives the status flags, unused by these ops.
812 void CodeGenerator::cgAddInt(IRInstruction* inst) {
813 auto dstReg = dstLoc(0).reg();
814 auto srcRegL = srcLoc(0).reg();
815 auto srcRegR = srcLoc(1).reg();
816 auto& v = vmain();
817 v << addq{srcRegR, srcRegL, dstReg, v.makeReg()};
// Integer subtract: dst = srcL - srcR.
820 void CodeGenerator::cgSubInt(IRInstruction* inst) {
821 auto dstReg = dstLoc(0).reg();
822 auto srcRegL = srcLoc(0).reg();
823 auto srcRegR = srcLoc(1).reg();
824 auto& v = vmain();
825 v << subq{srcRegR, srcRegL, dstReg, v.makeReg()};
// Integer multiply: dst = srcL * srcR.
828 void CodeGenerator::cgMulInt(IRInstruction* inst) {
829 auto dstReg = dstLoc(0).reg();
830 auto srcRegL = srcLoc(0).reg();
831 auto srcRegR = srcLoc(1).reg();
832 auto& v = vmain();
833 v << imul{srcRegR, srcRegL, dstReg, v.makeReg()};
836 //////////////////////////////////////////////////////////////////////
837 // Bitwise Operators
// Bitwise AND: dst = srcL & srcR (trailing v.makeReg() takes the unused
// status flags).
839 void CodeGenerator::cgAndInt(IRInstruction* inst) {
840 auto dstReg = dstLoc(0).reg();
841 auto srcRegL = srcLoc(0).reg();
842 auto srcRegR = srcLoc(1).reg();
843 auto& v = vmain();
844 v << andq{srcRegR, srcRegL, dstReg, v.makeReg()};
// Bitwise OR: dst = srcL | srcR.
847 void CodeGenerator::cgOrInt(IRInstruction* inst) {
848 auto dstReg = dstLoc(0).reg();
849 auto srcRegL = srcLoc(0).reg();
850 auto srcRegR = srcLoc(1).reg();
851 auto& v = vmain();
852 v << orq{srcRegR, srcRegL, dstReg, v.makeReg()};
// Bitwise XOR: dst = srcL ^ srcR.
855 void CodeGenerator::cgXorInt(IRInstruction* inst) {
856 auto dstReg = dstLoc(0).reg();
857 auto srcRegL = srcLoc(0).reg();
858 auto srcRegR = srcLoc(1).reg();
859 auto& v = vmain();
860 v << xorq{srcRegR, srcRegL, dstReg, v.makeReg()};
// Left shift by a variable amount (lslv); shift count in srcR.
863 void CodeGenerator::cgShl(IRInstruction* inst) {
864 auto dstReg = dstLoc(0).reg();
865 auto srcRegL = srcLoc(0).reg();
866 auto srcRegR = srcLoc(1).reg();
868 // TODO: t3870154 add shift-by-immediate support to vixl
869 vmain() << lslv{srcRegL, srcRegR, dstReg};
// Arithmetic (sign-preserving) right shift by a variable amount (asrv).
872 void CodeGenerator::cgShr(IRInstruction* inst) {
873 auto dstReg = dstLoc(0).reg();
874 auto srcRegL = srcLoc(0).reg();
875 auto srcRegR = srcLoc(1).reg();
877 // TODO: t3870154 add shift-by-immediate support to vixl
878 vmain() << asrv{srcRegL, srcRegR, dstReg};
881 //////////////////////////////////////////////////////////////////////
882 // Comparison Operations
// Compare the two integer sources and materialize the condition `cc`
// into the destination register via setcc.
884 void CodeGenerator::emitCompareIntAndSet(IRInstruction *inst,
885 ConditionCode cc) {
886 auto const sf = emitCompareInt(inst);
887 auto dst = dstLoc(0).reg();
888 vmain() << setcc{cc, sf, dst};
// Emit cmpq of src(1) against src(0) and return the status-flags Vreg.
891 Vreg CodeGenerator::emitCompareInt(IRInstruction* inst) {
892 auto srcRegL = srcLoc(0).reg();
893 auto srcRegR = srcLoc(1).reg();
894 auto& v = vmain();
895 auto const sf = v.makeReg();
896 v << cmpq{srcRegR, srcRegL, sf}; // att-style
897 return sf;
// Signed integer comparisons: each wrapper just picks the condition code
// for emitCompareIntAndSet.
900 void CodeGenerator::cgLtInt(IRInstruction* inst) {
901 emitCompareIntAndSet(inst, CC_L);
904 void CodeGenerator::cgGtInt(IRInstruction* inst) {
905 emitCompareIntAndSet(inst, CC_G);
909 void CodeGenerator::cgGteInt(IRInstruction* inst) {
910 emitCompareIntAndSet(inst, CC_GE);
913 void CodeGenerator::cgLteInt(IRInstruction* inst) {
914 emitCompareIntAndSet(inst, CC_LE);
918 void CodeGenerator::cgEqInt(IRInstruction* inst) {
919 emitCompareIntAndSet(inst, CC_E);
922 void CodeGenerator::cgNeqInt(IRInstruction* inst) {
923 emitCompareIntAndSet(inst, CC_NE);
926 //////////////////////////////////////////////////////////////////////
// Move call arguments into their assigned (physical) argument registers.
// Phase 1: collect all phys-reg -> phys-reg moves (Reg/Addr/TypeReg
// kinds) into a MovePlan and emit them in a safe order computed by
// doVregMoves, using copy2 swaps to break cycles. Phase 2: materialize
// everything the plan didn't cover — immediates, virtual-source copies,
// zero-extends, type shifts, and address displacements.
928 static void shuffleArgs(Vout& v, ArgGroup& args, CppCall& call) {
929 MovePlan moves;
930 PhysReg::Map<ArgDesc*> argDescs;
932 for (size_t i = 0; i < args.numGpArgs(); i++) {
933 auto& arg = args.gpArg(i);
934 auto kind = arg.kind();
935 if (!(kind == ArgDesc::Kind::Reg ||
936 kind == ArgDesc::Kind::Addr ||
937 kind == ArgDesc::Kind::TypeReg)) {
938 continue;
940 auto srcReg = arg.srcReg();
941 auto dstReg = arg.dstReg();
942 if (srcReg != dstReg && srcReg.isPhys()) {
943 moves[dstReg] = srcReg;
944 argDescs[dstReg] = &arg;
948 auto const howTo = doVregMoves(v.unit(), moves);
949 for (auto& how : howTo) {
950 auto src = how.m_src;
951 auto dst = how.m_dst;
952 if (how.m_kind == VMoveInfo::Kind::Move) {
953 if (dst.isVirt()) {
954 v << copy{src, dst};
955 } else {
// Destination is physical: apply the arg's addressing mode or
// zero-extension while performing the move.
956 auto* argDesc = argDescs[how.m_dst];
957 if (argDesc) {
958 auto kind = argDesc->kind();
959 if (kind == ArgDesc::Kind::Addr) {
960 v << lea{src[argDesc->disp().l()], dst};
961 } else {
962 if (argDesc->isZeroExtend()) {
963 v << movzbl{src, dst};
964 } else {
965 v << copy{src, dst};
// TypeReg args still need their shift in phase 2, so only
// non-TypeReg args are marked done here.
968 if (kind != ArgDesc::Kind::TypeReg) {
969 argDesc->markDone();
971 } else {
972 v << copy{src, dst};
975 } else {
// Cycle-breaking exchange of two registers.
976 v << copy2{src, dst, dst, src};
// Phase 2: handle args not completed by the move plan.
980 for (size_t i = 0; i < args.numGpArgs(); ++i) {
981 auto& arg = args.gpArg(i);
982 if (arg.done()) continue;
983 auto kind = arg.kind();
984 auto src = arg.srcReg();
985 auto dst = arg.dstReg();
986 if (kind == ArgDesc::Kind::Imm) {
987 v << ldimm{arg.imm().q(), dst};
988 } else if (kind == ArgDesc::Kind::Reg) {
989 if (arg.isZeroExtend()) {
990 if (src.isVirt()) {
991 v << movzbl{src, dst};
992 } else {
993 v << movzbl{dst, dst};
995 } else {
996 if (src.isVirt()) {
997 v << copy{src, dst};
1000 } else if (kind == ArgDesc::Kind::TypeReg) {
// Shift the type into its in-register position when the layout
// requires it (kTypeShiftBits > 0); otherwise a plain copy.
1001 if (kTypeShiftBits > 0) {
1002 if (src.isVirt()) {
1003 v << shlqi{kTypeShiftBits, src, dst, v.makeReg()};
1004 } else {
1005 v << shlqi{kTypeShiftBits, dst, dst, v.makeReg()};
1007 } else {
1008 if (src.isVirt()) {
1009 v << copy{src, dst};
1012 } else if (kind == ArgDesc::Kind::Addr) {
1013 if (src.isVirt()) {
1014 v << addqi{arg.disp(), src, dst, v.makeReg()};
1015 } else {
1016 v << addqi{arg.disp(), dst, dst, v.makeReg()};
1018 } else {
1019 not_implemented();
// Generic lowering for CALL_OPCODE instructions: look up the opcode's
// NativeCalls::CallMap entry, build the argument group from it, pick the
// call target (a fixed CppCall, or a function pointer carried in an SSA
// source), pick the return-destination kind, and hand everything to
// cgCallHelper.
1024 void CodeGenerator::cgCallNative(Vout& v, IRInstruction* inst) {
1025 using namespace NativeCalls;
1027 Opcode opc = inst->op();
1028 always_assert(CallMap::hasInfo(opc));
1030 auto const& info = CallMap::info(opc);
1031 ArgGroup argGroup = toArgGroup(info, m_slocs, inst);
1033 auto call = [&]() -> CppCall {
1034 switch (info.func.type) {
1035 case FuncType::Call:
1036 return CppCall(info.func.call);
1037 case FuncType::SSA:
// Target address comes from an SSA source at runtime-known index.
1038 return CppCall::direct(
1039 reinterpret_cast<void(*)()>(inst->src(info.func.srcIdx)->tcaVal()));
1041 not_reached();
1042 }();
1044 auto const dest = [&]() -> CallDest {
1045 switch (info.dest) {
1046 case DestType::None: return kVoidDest;
1047 case DestType::TV:
1048 case DestType::SIMD: return callDestTV(inst);
1049 case DestType::SSA: return callDest(inst);
1050 case DestType::Dbl: return callDestDbl(inst);
1052 not_reached();
1053 }();
1055 cgCallHelper(v, call, dest, info.sync, argGroup);
// Emit a call to a C++ helper.
//   call    - target (direct pointer or other CppCall kind)
//   dstInfo - where/how the return value should land (SSA/TV/Dbl/None)
//   sync    - whether to record a VM sync point at the call
//   args    - argument descriptors; assigned to ARM arg registers here.
// Stack arguments are not supported on this backend (asserted below).
// If the instruction has a catch block, wire the call's unwind edge to
// it; otherwise mark the call as uncatchable (hcnocatch). The result is
// copied out of x0 (SSA) or d0 (Dbl).
1058 void CodeGenerator::cgCallHelper(Vout& v,
1059 CppCall call,
1060 const CallDest& dstInfo,
1061 SyncOptions sync,
1062 ArgGroup& args) {
1063 assert(m_curInst->isNative());
1065 auto dstReg0 = dstInfo.reg0;
1066 DEBUG_ONLY auto dstReg1 = dstInfo.reg1;
1068 RegSet argRegs;
1069 for (size_t i = 0; i < args.numGpArgs(); i++) {
1070 PhysReg r(argReg(i));
1071 args.gpArg(i).setDstReg(r);
1072 argRegs.add(r);
1074 always_assert_flog(
1075 args.numStackArgs() == 0,
1076 "Stack arguments not yet supported on ARM: `{}'\n\n{}",
1077 *m_curInst, m_unit
1079 shuffleArgs(v, args, call);
1081 auto syncPoint = emitCall(v, call, argRegs);
1082 if (RuntimeOption::HHProfServerEnabled || sync != SyncOptions::kNoSyncPoint) {
1083 recordHostCallSyncPoint(v, syncPoint);
1086 auto* taken = m_curInst->taken();
1087 if (taken && taken->isCatch()) {
1088 auto& info = m_state.catches[taken];
1089 assert_not_implemented(args.numStackArgs() == 0);
1090 info.rspOffset = args.numStackArgs();
1091 auto next = v.makeBlock();
1092 v << hcunwind{syncPoint, {next, m_state.labels[taken]}};
1093 v = next;
1094 } else if (!m_curInst->is(Call, CallArray, ContEnter)) {
1095 v << hcnocatch{syncPoint};
1098 switch (dstInfo.type) {
1099 case DestType::TV: not_implemented();
1100 case DestType::SIMD: not_implemented();
1101 case DestType::SSA:
1102 assert(dstReg1 == InvalidReg);
1103 v << copy{PhysReg(vixl::x0), dstReg0};
1104 break;
1105 case DestType::None:
1106 assert(dstReg0 == InvalidReg && dstReg1 == InvalidReg);
1107 break;
1108 case DestType::Dbl:
1109 assert(dstReg1 == InvalidReg);
1110 v << copy{PhysReg(vixl::d0), dstReg0};
1111 break;
1115 CallDest CodeGenerator::callDest(Vreg reg0) const {
1116 return { DestType::SSA, reg0 };
1119 CallDest CodeGenerator::callDest(Vreg reg0, Vreg reg1) const {
1120 return { DestType::SSA, reg0, reg1 };
1124 * XXX copypasta below, but has to be in the class because of srcLoc()
1125 * and dstLoc(). Changing that would make callsites real messy.
1128 CallDest CodeGenerator::callDest(const IRInstruction* inst) const {
1129 if (!inst->numDsts()) return kVoidDest;
1130 auto loc = dstLoc(0);
1131 return { DestType::SSA, loc.reg(0), loc.reg(1) };
1134 CallDest CodeGenerator::callDestTV(const IRInstruction* inst) const {
1135 if (!inst->numDsts()) return kVoidDest;
1136 auto loc = dstLoc(0);
1137 if (loc.isFullSIMD()) {
1138 return { DestType::SIMD, loc.reg(0), InvalidReg };
1140 return { DestType::TV, loc.reg(0), loc.reg(1) };
1143 CallDest CodeGenerator::callDestDbl(const IRInstruction* inst) const {
1144 if (!inst->numDsts()) return kVoidDest;
1145 auto loc = dstLoc(0);
1146 return { DestType::Dbl, loc.reg(0), loc.reg(1) };
1149 //////////////////////////////////////////////////////////////////////
1151 static Vreg enregister(Vout& v, Vptr memRef) {
1152 auto r = v.makeReg();
1153 v << load{memRef, r};
1154 return r;
1157 static Vreg enregister(Vout& v, Vreg r) {
1158 return r;
// Emit a test of the runtime DataType in `typeReg` (with payload available
// at `dataSrc`) against the static `type`, setting status flags into `sf`
// and invoking `doJcc` with the condition code that means "type matches".
// For specialized Obj/Arr types, a second, more specific check (exact class
// or array kind) follows the primary DataType check, with a second doJcc.
1161 template<class Loc, class JmpFn>
1162 void CodeGenerator::emitTypeTest(Vout& v, Type type, Vreg typeReg, Loc dataSrc,
1163 Vreg sf, JmpFn doJcc) {
1164 assert(!(type <= Type::Cls));
1165 assert(typeReg.isVirt() || typeReg.isGP()); // expected W-type, ie 32-bit
// Gen matches everything: nothing to test, and doJcc is never called.
1167 if (type.equals(Type::Gen)) {
1168 return;
// Pick the comparison and condition code for the primary DataType check.
1171 ConditionCode cc;
1172 if (type <= Type::Str) {
1173 // Note: ARM can actually do better here; it has a fused test-and-branch
1174 // instruction. The way this code is factored makes it difficult to use,
1175 // though; the jump instruction will be written by some other code.
1176 v << testli{KindOfStringBit, typeReg, sf};
1177 cc = CC_NE;
1178 } else if (type == Type::Null) {
1179 v << cmpli{KindOfNull, typeReg, sf};
1180 cc = CC_LE;
1181 } else if (type == Type::UncountedInit) {
1182 v << testli{KindOfUncountedInitBit, typeReg, sf};
1183 cc = CC_NE;
1184 } else if (type == Type::Uncounted) {
1185 v << cmpli{KindOfRefCountThreshold, typeReg, sf};
1186 cc = CC_LE;
1187 } else if (type == Type::Cell) {
1188 v << cmpli{KindOfRef, typeReg, sf};
1189 cc = CC_L;
1190 } else {
// Exact single DataType: straight equality compare.
1191 assert(type.isKnownDataType());
1192 DataType dataType = type.toDataType();
1193 assert(dataType == KindOfRef ||
1194 (dataType >= KindOfUninit && dataType <= KindOfResource));
1195 v << cmpli{dataType, typeReg, sf};
1196 cc = CC_E;
1198 doJcc(cc);
// Specialized object type: additionally compare the object's class pointer
// against the expected (non-overridable) class.
1199 if (type < Type::Obj) {
1200 assert(type.getClass()->attrs() & AttrNoOverride);
1201 auto dataReg = enregister(v, dataSrc);
1202 auto vmclass = v.makeReg();
1203 emitLdLowPtr(v, vmclass, dataReg[ObjectData::getVMClassOffset()],
1204 sizeof(LowClassPtr));
1205 emitCmpClass(v, sf, vmclass, type.getClass());
1206 doJcc(CC_E);
1207 } else if (type < Type::Res) {
1208 CG_PUNT(TypeTest-on-Resource);
// Specialized array type: additionally compare the ArrayData kind byte.
1209 } else if (type <= Type::Arr && type.hasArrayKind()) {
1210 auto dataReg = enregister(v, dataSrc);
1211 auto kind = v.makeReg();
1212 v << loadzbl{dataReg[ArrayData::offsetofKind()], kind};
1213 v << cmpli{type.getArrayKind(), kind, sf};
1214 doJcc(CC_E);
// Guard that a local variable's runtime type matches the instruction's type
// parameter; on mismatch, emit a fallback to retranslate at the current
// SrcKey.
1218 void CodeGenerator::cgGuardLoc(IRInstruction* inst) {
1219 auto const rFP = srcLoc(0).reg();
1220 auto const baseOff = localOffset(inst->extra<GuardLoc>()->locId);
1221 auto& v = vmain();
// Load the local's type byte (zero-extended) and test it.
1222 auto type = v.makeReg();
1223 v << loadzbl{rFP[baseOff + TVOFF(m_type)], type};
1224 auto const sf = v.makeReg();
1225 emitTypeTest(v, inst->typeParam(), type, rFP[baseOff + TVOFF(m_data)], sf,
1226 [&] (ConditionCode cc) {
1227 auto const destSK = SrcKey(curFunc(), m_unit.bcOff(), resumed());
// cc means "matches", so branch away on its negation.
1228 v << fallbackcc{ccNegate(cc), sf, destSK};
// Guard that a stack slot's runtime type matches the instruction's type
// parameter; on mismatch, emit a fallback to retranslate at the current
// SrcKey. Mirrors cgGuardLoc but bases off the VM stack pointer.
1232 void CodeGenerator::cgGuardStk(IRInstruction* inst) {
1233 auto const rSP = srcLoc(0).reg();
1234 auto const baseOff = cellsToBytes(inst->extra<GuardStk>()->offset);
1235 auto& v = vmain();
1236 auto type = v.makeReg();
1237 v << loadzbl{rSP[baseOff + TVOFF(m_type)], type};
1238 auto const sf = v.makeReg();
1239 emitTypeTest(v, inst->typeParam(), type, rSP[baseOff + TVOFF(m_data)], sf,
1240 [&] (ConditionCode cc) {
1241 auto const destSK = SrcKey(curFunc(), m_unit.bcOff(), resumed());
// cc means "matches", so branch away on its negation.
1242 v << fallbackcc{ccNegate(cc), sf, destSK};
// Check a stack slot's runtime type against the instruction's type
// parameter; on mismatch, jump to the instruction's taken block instead of
// retranslating (contrast with cgGuardStk).
1246 void CodeGenerator::cgCheckStk(IRInstruction* inst) {
1247 auto const rSP = srcLoc(0).reg();
1248 auto const baseOff = cellsToBytes(inst->extra<CheckStk>()->offset);
1249 auto& v = vmain();
1250 auto type = v.makeReg();
1251 v << loadzbl{rSP[baseOff + TVOFF(m_type)], type};
1252 auto const sf = v.makeReg();
1253 emitTypeTest(v, inst->typeParam(), type, rSP[baseOff + TVOFF(m_data)], sf,
1254 [&] (ConditionCode cc) {
1255 auto next = v.makeBlock();
// cc means "matches": fall through to `next` on match, else take the edge.
1256 v << jcc{ccNegate(cc), sf, {next, label(inst->taken())}};
1257 v = next;
// Refine an SSA value's type: if the runtime value matches the type
// parameter, copy value (and type, when tracked) to the destination;
// otherwise jump to the taken block. Handles three cases: statically known
// match, statically known mismatch, and a runtime test.
1262 void CodeGenerator::cgCheckType(IRInstruction* inst) {
1263 auto const src = inst->src(0);
1264 Type srcType = src->type();
1265 auto const rVal = srcLoc(0).reg(0);
1266 auto const rType = srcLoc(0).reg(1);
1267 auto& v = vmain();
// Copy value/type to the destination registers; if no runtime type register
// exists, materialize the statically known DataType as an immediate.
1269 auto doMov = [&] {
1270 auto const valDst = dstLoc(0).reg(0);
1271 auto const typeDst = dstLoc(0).reg(1);
1272 v << copy{rVal, valDst};
1273 if (typeDst.isValid()) {
1274 if (rType.isValid()) {
1275 v << copy{rType, typeDst};
1276 } else {
1277 v << ldimm{srcType.toDataType(), typeDst};
// Statically known to match: no test needed.
1282 Type typeParam = inst->typeParam();
1283 if (src->isA(typeParam) ||
1284 // Boxed types are checked lazily, so there's nothing to be done here.
1285 (srcType.isBoxed() && typeParam.isBoxed())) {
1286 doMov();
1287 return;
// Statically known to mismatch: unconditionally take the branch.
1289 if (srcType.not(typeParam)) {
1290 v << jmp{label(inst->taken())};
1291 return;
// Runtime test on the tracked type register, branching away on mismatch.
1294 if (rType.isValid()) {
1295 auto const sf = v.makeReg();
1296 emitTypeTest(v, typeParam, rType, rVal, sf,
1297 [&] (ConditionCode cc) {
1298 auto next = v.makeBlock();
1299 v << jcc{ccNegate(cc), sf, {next, label(inst->taken())}};
1300 v = next;
1302 } else if (typeParam <= Type::Uncounted &&
1303 ((srcType == Type::Str && typeParam.maybe(Type::StaticStr)) ||
1304 (srcType == Type::Arr && typeParam.maybe(Type::StaticArr)))) {
1305 // We carry Str and Arr operands around without a type register, even
1306 // though they're union types. The static and non-static subtypes are
1307 // distinguished by the refcount field.
1308 assert(rVal.isValid());
1309 auto count = v.makeReg();
1310 auto next = v.makeBlock();
1311 v << loadl{rVal[FAST_REFCOUNT_OFFSET], count};
// Test the "uncounted" bit of the refcount; take the edge when it's clear.
1312 v << tbcc{vixl::eq, UncountedBitPos, count, {next, label(inst->taken())}};
1313 v = next;
1314 } else {
1315 always_assert_log( false, [&] {
1316 return folly::format("Bad src: {} and dst: {} types in '{}'",
1317 srcType, typeParam, *inst).str();
1320 doMov();
// Guard a stack slot's runtime type; on mismatch, bind a side exit to the
// bytecode offset recorded in the instruction's extra data (rather than
// retranslating the current SrcKey).
1323 void CodeGenerator::cgSideExitGuardStk(IRInstruction* inst) {
1324 auto const sp = srcLoc(0).reg();
1325 auto const extra = inst->extra<SideExitGuardStk>();
1326 auto& v = vmain();
1328 auto type = v.makeReg();
1329 v << loadzbl{sp[cellsToBytes(extra->checkedSlot) + TVOFF(m_type)], type};
1330 auto const sf = v.makeReg();
1331 emitTypeTest(v, inst->typeParam(), type,
1332 sp[cellsToBytes(extra->checkedSlot) + TVOFF(m_data)], sf,
1333 [&] (ConditionCode cc) {
1334 auto const sk = SrcKey(curFunc(), extra->taken, resumed());
// cc means "matches", so exit on its negation.
1335 v << bindexit{ccNegate(cc), sf, sk};
// Emit the by-ref parameter ("reffiness") check shared by GuardRefs and
// CheckRefs: compare a 64-bit window of the callee Func's ref-bit vector,
// masked by mask64, against vals64. `doJcc(cc, sf)` is invoked with the
// condition meaning "mismatch" so the caller decides how to branch.
// Sources: 0 = Func*, 1 = nParams, 2 = first bit index (const),
// 3 = mask64 (const), 4 = vals64 (const).
1340 template <class JmpFn>
1341 void CodeGenerator::emitReffinessTest(IRInstruction* inst, Vreg sf,
1342 JmpFn doJcc) {
1343 assert(inst->numSrcs() == 5);
1345 DEBUG_ONLY SSATmp* nParamsTmp = inst->src(1);
1346 DEBUG_ONLY SSATmp* firstBitNumTmp = inst->src(2);
1347 DEBUG_ONLY SSATmp* mask64Tmp = inst->src(3);
1348 DEBUG_ONLY SSATmp* vals64Tmp = inst->src(4);
1350 auto funcPtrLoc = srcLoc(0);
1351 auto nParamsLoc = srcLoc(1);
1352 auto mask64Loc = srcLoc(3);
1353 auto vals64Loc = srcLoc(4);
1355 // Get values in place
1356 auto funcPtrReg = funcPtrLoc.reg();
1357 assert(funcPtrReg.isValid());
1359 auto nParamsReg = nParamsLoc.reg();
1361 auto firstBitNum = static_cast<uint32_t>(firstBitNumTmp->intVal());
1362 auto mask64Reg = mask64Loc.reg();
1363 uint64_t mask64 = mask64Tmp->intVal();
1364 assert(mask64);
1366 auto vals64Reg = vals64Loc.reg();
1367 uint64_t vals64 = vals64Tmp->intVal();
1368 assert((vals64 & mask64) == vals64);
// Core test: load the relevant 64-bit chunk of ref bits, mask it, and
// compare against the expected values; doJcc fires on inequality (CC_NE).
1370 auto thenBody = [&](Vout& v) {
1371 auto bitsOff = sizeof(uint64_t) * (firstBitNum / 64);
1372 auto cond = CC_NE;
1373 auto bitsPtrReg = v.makeReg();
// The first 64 bits live inline in the Func; later chunks are reached via
// the shared data, whose layout shifts the offset by one word.
1375 if (firstBitNum == 0) {
1376 bitsOff = Func::refBitValOff();
1377 bitsPtrReg = funcPtrReg;
1378 } else {
1379 v << load{funcPtrReg[Func::sharedOff()], bitsPtrReg};
1380 bitsOff -= sizeof(uint64_t);
1383 // Don't need the bits pointer after this point
1384 auto bits = v.makeReg();
1385 // Load the bits
1386 v << load{bitsPtrReg[bitsOff], bits};
1388 // Mask the bits. There are restrictions on what can be encoded as an
1389 // immediate in ARM's logical instructions, and if they're not met,
1390 // the assembler will compensate using ip0 or ip1 as tmps.
1391 auto masked = v.makeReg();
1392 v << andq{mask64Reg, bits, masked, v.makeReg()};
1394 // Now do the compare. There are also restrictions on immediates in
1395 // arithmetic instructions (of which Cmp is one; it's just a subtract that
1396 // sets flags), so same deal as with the mask immediate above.
1397 v << cmpq{vals64Reg, masked, sf};
1398 doJcc(cond, sf);
1401 auto& v = vmain();
1402 if (firstBitNum == 0) {
1403 assert(nParamsTmp->isConst());
1404 // This is the first 64 bits. No need to check nParams.
1405 thenBody(v);
1406 } else {
1407 // Check number of args...
1408 auto const sf2 = v.makeReg();
1409 v << cmpq{v.cns(firstBitNum), nParamsReg, sf2};
1411 if (vals64 != 0 && vals64 != mask64) {
1412 // If we're beyond nParams, then either all params
1413 // are refs, or all params are non-refs, so if vals64
1414 // isn't 0 and isnt mask64, there's no possibility of
1415 // a match
1416 doJcc(CC_LE, sf2);
1417 thenBody(v);
1418 } else {
1419 ifThenElse(v, CC_G, sf2, thenBody, /* else */ [&](Vout& v) {
1420 // If not special builtin...
1421 static_assert(sizeof(HPHP::Attr) == 4, "");
1422 auto attr = v.makeReg();
1423 v << loadl{funcPtrReg[Func::attrsOff()], attr};
1424 auto const sf = v.makeReg();
1425 v << testli{AttrVariadicByRef, attr, sf};
// Past nParams: variadic-by-ref decides whether all-refs (vals64 != 0) or
// all-non-refs is a match.
1426 doJcc(vals64 ? CC_Z : CC_NZ, sf);
// Guard on the callee's by-ref parameter bits; on mismatch, fall back to a
// retranslation at this instruction's bytecode offset.
1432 void CodeGenerator::cgGuardRefs(IRInstruction* inst) {
1433 auto& v = vmain();
1434 auto const sf = v.makeReg();
1435 emitReffinessTest(inst, sf,
1436 [&](ConditionCode cc, Vreg sfTaken) {
// cc already means "mismatch" here, so no negation is needed.
1437 auto const destSK = SrcKey(curFunc(), inst->marker().bcOff(), resumed());
1438 v << fallbackcc{cc, sfTaken, destSK};
// Check the callee's by-ref parameter bits; on mismatch, jump to the taken
// block (contrast with cgGuardRefs, which retranslates).
1442 void CodeGenerator::cgCheckRefs(IRInstruction* inst) {
1443 auto& v = vmain();
1444 auto const sf = v.makeReg();
1445 emitReffinessTest(inst, sf,
1446 [&](ConditionCode cc, Vreg sfTaken) {
1447 auto next = v.makeBlock();
// cc means "mismatch": take the edge on mismatch, else fall through.
1448 v << jcc{cc, sfTaken, {next, label(inst->taken())}};
1449 v = next;
1453 //////////////////////////////////////////////////////////////////////
1455 void CodeGenerator::cgSyncABIRegs(IRInstruction* inst) {
1456 auto& v = vmain();
1457 v << copy{srcLoc(0).reg(), PhysReg(rVmFp)};
1458 v << copy{srcLoc(1).reg(), PhysReg(rVmSp)};
1461 void CodeGenerator::cgReqBindJmp(IRInstruction* inst) {
1462 auto to = SrcKey(curFunc(), inst->extra<ReqBindJmp>()->offset, resumed());
1463 vmain() << bindjmp{to};
1466 void CodeGenerator::cgReqRetranslate(IRInstruction* inst) {
1467 assert(m_unit.bcOff() == inst->marker().bcOff());
1468 auto const destSK = SrcKey(curFunc(), m_unit.bcOff(), resumed());
1469 auto& v = vmain();
1470 v << fallback{destSK};
// Materialize a pre-live ActRec on the stack: store the arg count, magic
// invoke name, this/class context, and Func pointer into the frame slots
// just below the current stack pointer, then produce the adjusted stack
// pointer as the destination.
1473 void CodeGenerator::cgSpillFrame(IRInstruction* inst) {
1474 auto const func = inst->src(1);
1475 auto const objOrCls = inst->src(2);
1476 auto const invName = inst->extra<SpillFrame>()->invName;
1477 auto const nArgs = inst->extra<SpillFrame>()->numArgs;
1479 auto spReg = srcLoc(0).reg();
1480 auto funcLoc = srcLoc(1);
1481 auto objClsReg = srcLoc(2).reg();
// The new ActRec lives kNumActRecCells below the current stack pointer.
1482 ptrdiff_t spOff = -kNumActRecCells * sizeof(Cell);
1483 auto& v = vmain();
1485 v << storel{v.cns(nArgs), spReg[spOff + AROFF(m_numArgsAndFlags)]};
1487 // Magic-call name.
1488 if (invName) {
// Tag the pointer with kInvNameBit to mark a magic-call invocation.
1489 auto bits = reinterpret_cast<uintptr_t>(invName) | ActRec::kInvNameBit;
1490 v << store{v.cns(bits), spReg[spOff + AROFF(m_invName)]};
1491 } else {
// xzr is the ARM zero register: store nullptr.
1492 v << store{PhysReg(vixl::xzr), spReg[spOff + AROFF(m_invName)]};
1495 // Func and this/class are slightly tricky. The func may be a tuple of a Func*
1496 // and context.
1498 if (objOrCls->isA(Type::Cls)) {
1499 if (objOrCls->isConst()) {
// A Class* in m_this is distinguished from an ObjectData* by its low bit.
1500 v << store{v.cns(objOrCls->rawVal() | 1), spReg[spOff + AROFF(m_this)]};
1501 } else {
1502 auto ctx = v.makeReg();
1503 v << orqi{1, objClsReg, ctx, v.makeReg()};
1504 v << store{ctx, spReg[spOff + AROFF(m_this)]};
1506 } else if (objOrCls->isA(Type::Obj) || objOrCls->isA(Type::Ctx)) {
1507 v << store{objClsReg, spReg[spOff + AROFF(m_this)]};
1508 } else {
1509 assert(objOrCls->isA(Type::Nullptr));
1510 v << store{PhysReg(vixl::xzr), spReg[spOff + AROFF(m_this)]};
1513 // Now set func, and possibly this/cls
1514 if (!func->isA(Type::Nullptr)) {
1515 auto func = funcLoc.reg(0);
1516 v << store{func, spReg[spOff + AROFF(m_func)]};
1519 // Adjust stack pointer
1520 v << addqi{safe_cast<int32_t>(spOff), spReg, dstLoc(0).reg(), v.makeReg()};
1523 //////////////////////////////////////////////////////////////////////
// Call a native builtin function. Marshals arguments (passing smart-pointer
// parameters by address of their payload), routes by-ref returns through the
// MInstrState's tvBuiltinReturn slot, then unpacks the return into the
// destination registers, mapping a null payload/type to KindOfNull.
1525 void CodeGenerator::cgCallBuiltin(IRInstruction* inst) {
1526 auto const func = inst->extra<CallBuiltinData>()->callee;
1527 auto const numArgs = func->numParams();
1528 auto const funcReturnType = func->returnType();
1529 int returnOffset = MISOFF(tvBuiltinReturn);
1530 auto& v = vmain();
// Eagerly sync vmfp/vmsp/vmpc into RDS for functions the fixup map wants
// recorded (so the VM state is consistent if the builtin reenters).
1532 if (FixupMap::eagerRecord(func)) {
1533 // Save VM registers
1534 PhysReg vmfp(rVmFp), vmsp(rVmSp), rds(rVmTl);
1535 auto const* pc = curFunc()->unit()->entry() + m_curInst->marker().bcOff();
1536 v << store{vmfp, rds[RDS::kVmfpOff]};
1537 v << store{vmsp, rds[RDS::kVmspOff]};
1538 v << store{v.cns(pc), rds[RDS::kVmpcOff]};
1541 // The stack pointer currently points to the MInstrState we need to use.
1542 PhysReg sp(vixl::sp); // C++ sp, not vmsp
1543 auto mis = v.makeReg();
1544 v << copy{sp, mis};//XXX why do this copy?
// By-ref returns get a hidden first argument: the address of the return
// storage (payload address for smart-pointer returns, whole TV otherwise).
1546 auto callArgs = argGroup();
1547 if (isCppByRef(funcReturnType)) {
1548 if (isSmartPtrRef(funcReturnType)) {
1549 // first arg is pointer to storage for the return value
1550 returnOffset += TVOFF(m_data);
1552 callArgs.addr(mis, returnOffset);
// Then `this`/class for methods, then the declared parameters.
1555 auto srcNum = uint32_t{0};
1556 if (func->isMethod()) {
1557 callArgs.ssa(srcNum);
1558 ++srcNum;
1560 for (auto i = uint32_t{0}; i < numArgs; ++i, ++srcNum) {
1561 auto const& pi = func->params()[i];
1562 if (TVOFF(m_data) && isSmartPtrRef(pi.builtinType)) {
1563 callArgs.addr(srcLoc(srcNum).reg(), TVOFF(m_data));
1564 } else {
1565 callArgs.ssa(srcNum);
1569 auto dst = dstLoc(0).reg(0);
1570 auto dstType = dstLoc(0).reg(1);
1572 if (callArgs.numStackArgs() != 0) {
1573 CG_PUNT(cgCallBuiltin-StackArgs);
1575 cgCallHelper(v,
1576 CppCall::direct(func->nativeFuncPtr()),
1577 isCppByRef(funcReturnType) ? kVoidDest : callDest(dst),
1578 SyncOptions::kSyncPoint,
1579 callArgs);
// Simple (by-value) returns were already placed by cgCallHelper.
1581 auto returnType = inst->typeParam();
1582 if (!dst.isValid() || returnType.isSimpleType()) {
1583 return;
1586 mis = sp;
// Smart-pointer return: a null payload means the builtin returned null.
1587 if (returnType.isReferenceType()) {
1588 // this should use some kind of cmov
1589 assert(isCppByRef(funcReturnType) && isSmartPtrRef(funcReturnType));
1590 v << load{mis[returnOffset + TVOFF(m_data)], dst};
1591 condZero(v, dst, dstType, [&](Vout& v) {
1592 return v.cns(KindOfNull);
1593 }, [&](Vout& v) {
1594 return v.cns(returnType.toDataType());
1596 return;
// Full TypedValue return: read payload and type from the MInstrState slot;
// KindOfUninit (0) in the type byte maps to KindOfNull.
1599 if (returnType <= Type::Cell || returnType <= Type::BoxedCell) {
1600 // this should use some kind of cmov
1601 static_assert(KindOfUninit == 0, "KindOfUninit must be 0 for test");
1602 assert(isCppByRef(funcReturnType) && !isSmartPtrRef(funcReturnType));
1603 auto tmp_dst_type = v.makeReg();
1604 v << load{mis[returnOffset + TVOFF(m_data)], dst};
1605 v << loadzbl{mis[returnOffset + TVOFF(m_type)], tmp_dst_type};
1606 condZero(v, tmp_dst_type, dstType, [&](Vout& v) {
1607 return v.cns(KindOfNull);
1608 }, [&](Vout& v) {
1609 return tmp_dst_type;
1611 return;
// Any other return type is unexpected here.
1614 always_assert(false);
// Emit an HHBC function call: fill the callee ActRec's saved-frame-pointer
// and return-offset slots (the ActRec sits numParams cells above the stack
// pointer), then emit a bindcall service request for the callee.
1617 void CodeGenerator::cgCall(IRInstruction* inst) {
1618 auto const extra = inst->extra<Call>();
1619 auto const rSP = srcLoc(0).reg();
1620 auto const rFP = srcLoc(1).reg();
// Byte offset from the stack pointer to the ActRec, past the pushed args.
1621 auto const ar = extra->numParams * sizeof(TypedValue);
1622 auto const srcKey = m_curInst->marker().sk();
1623 auto& v = vmain();
1624 v << store{rFP, rSP[ar + AROFF(m_sfp)]};
1625 v << storel{v.cns(extra->after), rSP[ar + AROFF(m_soff)]};
1626 v << bindcall{srcKey, extra->callee, extra->numParams};
1627 assert(dstLoc(0).reg() == PhysReg(rVmSp)); // bindcall will adjust vmsp
1630 //////////////////////////////////////////////////////////////////////
// Start of a catch trace. No code is emitted; we only assert the invariant
// that the unwinder left no stack-passed arguments to clean up.
1632 void CodeGenerator::cgBeginCatch(IRInstruction* inst) {
1633 UNUSED auto const& info = m_state.catches[inst->block()];
1634 assert(info.rspOffset == 0); // stack args not supported yet
// Helper called at the end of a catch trace: marks the VM registers clean
// and resumes the pending exception in the active ARM simulator.
1637 static void unwindResumeHelper() {
1638 // We don't have this sorted out for native mode yet
1639 always_assert(RuntimeOption::EvalSimulateARM);
1641 tl_regState = VMRegState::CLEAN;
1642 g_context->m_activeSims.back()->resume_last_exception();
1645 void CodeGenerator::cgEndCatch(IRInstruction* inst) {
1646 emitCall(vmain(), CppCall::direct(unwindResumeHelper), RegSet{});
1649 //////////////////////////////////////////////////////////////////////
// Load a full TypedValue at base[offset] into the value/type register pair
// in `dst`. Type-check-on-load (`label`) and SIMD destinations are not
// implemented on ARM yet.
1651 void CodeGenerator::emitLoadTypedValue(Vout& v, Vloc dst, Vreg base,
1652 ptrdiff_t offset, Block* label) {
1653 if (label) not_implemented();
1654 if (dst.isFullSIMD()) not_implemented();
1656 auto valueDst = dst.reg(0);
1657 auto typeDst = dst.reg(1);
1659 // Avoid clobbering the base reg if we'll need it later
1660 if (base == typeDst && valueDst.isValid()) {
1661 auto tmp = v.makeReg();
1662 v << copy{base, tmp};
1663 base = tmp;
// Type byte is zero-extended into the 32-bit type register.
1666 if (typeDst.isValid()) {
1667 v << loadzbl{base[offset + TVOFF(m_type)], typeDst};
1670 if (valueDst.isValid()) {
1671 v << load{base[offset + TVOFF(m_data)], valueDst};
// Store a full TypedValue from the two-register `src` location to
// base[offset]: 64-bit payload plus one type byte.
1675 void CodeGenerator::emitStoreTypedValue(Vout& v, Vreg base, ptrdiff_t offset,
1676 Vloc src) {
1677 assert(src.numWords() == 2);
1678 auto reg0 = src.reg(0);
1679 auto reg1 = src.reg(1);
1680 v << store{reg0, base[offset + TVOFF(m_data)]};
1681 v << storeb{reg1, base[offset + TVOFF(m_type)]};
// Load a value of static type `type` from base[offset] into `dst`. Types
// that need a runtime type register delegate to emitLoadTypedValue;
// otherwise only the payload is loaded. Type-check-on-load (`label`) is not
// implemented.
1684 void CodeGenerator::emitLoad(Vout& v, Type type, Vloc dst, Vreg base,
1685 ptrdiff_t offset, Block* label /* = nullptr */) {
1686 if (type.needsReg()) {
1687 return emitLoadTypedValue(v, dst, base, offset, label);
1689 if (label) {
1690 not_implemented();
1692 auto data = dst.reg();
1693 v << load{base[offset + TVOFF(m_data)], data};
// Store `src` to the TypedValue slot at base[offset]. Union types go
// through emitStoreTypedValue; otherwise the statically known type byte is
// stored (when genStoreType), and the payload is stored unless the type has
// none (Null). Bools are zero-extended to a full word first.
1696 void CodeGenerator::emitStore(Vout& v, Vreg base, ptrdiff_t offset,
1697 SSATmp* src, Vloc srcLoc,
1698 bool genStoreType /* = true */) {
1699 auto type = src->type();
1700 if (type.needsReg()) {
1701 return emitStoreTypedValue(v, base, offset, srcLoc);
1703 if (genStoreType) {
1704 auto dt = type.toDataType();
1705 v << storeb{v.cns(dt), base[offset + TVOFF(m_type)]};
// Null/Uninit carry no payload; type byte alone suffices.
1707 if (type <= Type::Null) {
1708 return;
1711 auto data = srcLoc.reg();
1712 if (src->isA(Type::Bool)) {
1713 auto extended = v.makeReg();
1714 v << movzbl{data, extended};
1715 data = extended;
1717 v << store{data, base[offset + TVOFF(m_data)]};
1720 void CodeGenerator::cgLdLoc(IRInstruction* inst) {
1721 auto base = srcLoc(0).reg();
1722 auto offset = localOffset(inst->extra<LdLoc>()->locId);
1723 emitLoad(vmain(), inst->dst()->type(), dstLoc(0), base, offset);
1726 void CodeGenerator::cgStLocWork(IRInstruction* inst) {
1727 auto base = srcLoc(0).reg();
1728 auto offset = localOffset(inst->extra<LocalId>()->locId);
1729 emitStore(vmain(), base, offset, inst->src(1), srcLoc(1),
1730 true /* store type */);
1733 void CodeGenerator::cgStLoc(IRInstruction* inst) { cgStLocWork(inst); }
1734 void CodeGenerator::cgStGbl(IRInstruction* inst) { cgStLocWork(inst); }
1736 void CodeGenerator::cgLdStack(IRInstruction* inst) {
1737 assert(inst->taken() == nullptr);
1738 auto src = srcLoc(0).reg();
1739 auto offset = cellsToBytes(inst->extra<LdStack>()->offset);
1740 emitLoad(vmain(), inst->dst()->type(), dstLoc(0), src, offset);
// Load a raw (untyped) field described by the RawMemData extra from
// src(0)[info.offset + extraOff] into dst(0), using a load width matching
// the field's declared size.
1743 void CodeGenerator::emitLdRaw(IRInstruction* inst, size_t extraOff) {
1744 auto dest = dstLoc(0).reg();
1745 auto offset = inst->extra<RawMemData>()->info().offset;
1746 auto ptr = srcLoc(0).reg()[offset + extraOff];
1747 auto& v = vmain();
1748 switch (inst->extra<RawMemData>()->info().size) {
1749 case sz::byte: v << loadzbl{ptr, dest}; break;
1750 case sz::dword: v << loadl{ptr, dest}; break;
1751 case sz::qword: v << load{ptr, dest}; break;
1752 default: not_implemented();
1756 void CodeGenerator::cgLdRaw(IRInstruction* inst) {
1757 emitLdRaw(inst, 0);
1760 void CodeGenerator::cgLdContArRaw(IRInstruction* inst) {
1761 emitLdRaw(inst, -c_Generator::arOff());
1764 void CodeGenerator::cgLdARFuncPtr(IRInstruction* inst) {
1765 auto dst = dstLoc(0).reg();
1766 auto base = srcLoc(0).reg();
1767 auto offset = inst->src(1)->intVal();
1768 vmain() << load{base[offset + AROFF(m_func)], dst};
// Load a named function from its RDS cache handle. On a cache miss (null),
// call lookupUnknownFunc to resolve (and autoload) the function; the result
// of whichever path ran becomes dst via condZero's phi.
1771 void CodeGenerator::cgLdFuncCached(IRInstruction* inst) {
1772 auto dst = dstLoc(0).reg();
1773 auto const name = inst->extra<LdFuncCachedData>()->name;
1774 auto const ch = NamedEntity::get(name)->getFuncHandle();
1775 PhysReg rds(rVmTl);
1776 auto& v = vmain();
1778 auto dst1 = v.makeReg();
1779 v << load{rds[ch], dst1};
// condZero(value, dst, thenVal, elseVal): dst1 == 0 means cache miss.
1780 condZero(v, dst1, dst, [&](Vout& v) {
1781 auto dst2 = v.makeReg();
// Spell out the function-pointer type to pick the right overload.
1782 const Func* (*const func)(const StringData*) = lookupUnknownFunc;
1783 cgCallHelper(v, CppCall::direct(func),
1784 callDest(dst2),
1785 SyncOptions::kSyncPoint,
1786 argGroup().immPtr(inst->extra<LdFuncCached>()->name));
1787 return dst2;
1788 }, [&](Vout& v) {
1789 return dst1;
1793 void CodeGenerator::cgLdStackAddr(IRInstruction* inst) {
1794 auto const dst = dstLoc(0).reg();
1795 auto const base = srcLoc(0).reg();
1796 auto const offset = cellsToBytes(inst->extra<LdStackAddr>()->offset);
1797 vmain() << lea{base[offset], dst};
// Write the instruction's spill values (srcs 2..N) to their stack slots and
// produce the adjusted stack pointer. src(1) is the stack-pointer deficit in
// cells; the net adjustment accounts for the cells being spilled.
1800 void CodeGenerator::cgSpillStack(IRInstruction* inst) {
1801 // TODO(2966414): so much of this logic could be shared. The opcode itself
1802 // should probably be broken up.
1803 auto const spDeficit = inst->src(1)->intVal();
1804 auto const spillVals = inst->srcs().subpiece(2);
1805 auto const numSpillSrcs = spillVals.size();
1806 auto const dst = dstLoc(0).reg();
1807 auto const sp = srcLoc(0).reg();
1808 auto const spillCells = spillValueCells(inst);
1809 auto& v = vmain();
// Net byte adjustment to the stack pointer after spilling.
1810 ptrdiff_t adjustment = (spDeficit - spillCells) * sizeof(Cell);
1811 for (uint32_t i = 0; i < numSpillSrcs; ++i) {
1812 const ptrdiff_t offset = i * sizeof(Cell) + adjustment;
1813 emitStore(v, sp, offset, spillVals[i], srcLoc(i + 2));
1815 v << addqi{safe_cast<int32_t>(adjustment), sp, dst, v.makeReg()};
// Shared body of InterpOne/InterpOneCF: look up the interpreter entry point
// for the opcode at the given bytecode offset and call it with (fp, sp, pc).
1818 void CodeGenerator::cgInterpOneCommon(IRInstruction* inst) {
1819 auto pcOff = inst->extra<InterpOneData>()->bcOff;
// Read the opcode byte from the unit's bytecode to pick the handler.
1821 auto opc = *(curFunc()->unit()->at(pcOff));
1822 auto* interpOneHelper = interpOneEntryPoints[opc];
1824 cgCallHelper(vmain(),
1825 CppCall::direct(reinterpret_cast<void (*)()>(interpOneHelper)),
1826 kVoidDest,
1827 SyncOptions::kSyncPoint,
1828 argGroup().ssa(1/*fp*/).ssa(0/*sp*/).imm(pcOff));
// Interpret one bytecode, then adjust the stack pointer by the net cells the
// instruction popped/pushed.
1831 void CodeGenerator::cgInterpOne(IRInstruction* inst) {
1832 cgInterpOneCommon(inst);
1833 auto const& extra = *inst->extra<InterpOne>();
1834 auto newSp = dstLoc(0).reg();
1835 auto spAdjustBytes = cellsToBytes(extra.cellsPopped - extra.cellsPushed);
1836 auto& v = vmain();
1837 v << addqi{spAdjustBytes, newSp, newSp, v.makeReg()};
// Interpret one control-flow bytecode: the interpreter may move vmfp/vmsp,
// so reload them from RDS and resume execution from the updated VM state.
1840 void CodeGenerator::cgInterpOneCF(IRInstruction* inst) {
1841 cgInterpOneCommon(inst);
1842 auto& v = vmain();
1843 PhysReg rds(rVmTl), fp(rVmFp), sp(rVmSp);
1844 v << load{rds[RDS::kVmfpOff], fp};
1845 v << load{rds[RDS::kVmspOff], sp};
1846 v << resume{};
1849 void CodeGenerator::cgLdClsName(IRInstruction* inst) {
1850 auto const dst = dstLoc(0).reg();
1851 auto const src = srcLoc(0).reg();
1852 auto& v = vmain();
1853 auto preclass = v.makeReg();
1854 v << load{src[Class::preClassOff()], preclass};
1855 v << load{preclass[PreClass::nameOffset()], dst};
1858 //////////////////////////////////////////////////////////////////////
1860 void CodeGenerator::cgCountArrayFast(IRInstruction* inst) {
1861 auto const array = srcLoc(0).reg();
1862 auto const size = dstLoc(0).reg();
1863 vmain() << loadl{array[ArrayData::offsetofSize()], size};
1866 void CodeGenerator::cgCountCollection(IRInstruction* inst) {
1867 auto const collection = srcLoc(0).reg();
1868 auto const size = dstLoc(0).reg();
1869 vmain() << loadl{collection[FAST_COLLECTION_SIZE_OFFSET], size};
1872 //////////////////////////////////////////////////////////////////////
1874 void CodeGenerator::cgInst(IRInstruction* inst) {
1875 assert(!m_curInst && m_slocs.empty() && m_dlocs.empty());
1876 m_curInst = inst;
1877 SCOPE_EXIT {
1878 m_curInst = nullptr;
1879 m_slocs.clear();
1880 m_dlocs.clear();
1882 for (auto s : inst->srcs()) {
1883 m_slocs.push_back(m_state.locs[s]);
1884 assert(m_slocs.back().reg(0).isValid());
1886 for (auto& d : inst->dsts()) {
1887 m_dlocs.push_back(m_state.locs[d]);
1888 assert(m_dlocs.back().reg(0).isValid());
1891 switch (inst->op()) {
1892 #define O(name, dsts, srcs, flags) \
1893 case name: FTRACE(7, "cg" #name "\n"); \
1894 cg ## name (inst); \
1895 break;
1896 IR_OPCODES
1897 #undef O
1898 default:
1899 always_assert(false);
1901 auto& v = vmain();
1902 if (inst->isBlockEnd() && !v.closed()) {
1903 if (auto next = inst->next()) {
1904 v << jmp{m_state.labels[next]};
1905 } else {
1906 v << brk{0}; // or end?